diff --git a/train_unsloth.ipynb b/train_unsloth.ipynb
new file mode 100644
index 0000000..9d62008
--- /dev/null
+++ b/train_unsloth.ipynb
@@ -0,0 +1,10069 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "f9f8a4ee",
+ "metadata": {
+ "id": "IqM-T1RTzY6C",
+ "papermill": {
+ "duration": 0.038159,
+ "end_time": "2024-03-28T00:08:52.505173",
+ "exception": false,
+ "start_time": "2024-03-28T00:08:52.467014",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "To run this, press \"*Runtime*\" and press \"*Run all*\" on a **free** Tesla T4 Google Colab instance!\n",
+ "\n",
+ "Join Discord if you need help + support us if you can!\n",
+ "\n",
+ "To install Unsloth on your own computer, follow the installation instructions on our Github page [here](https://github.com/unslothai/unsloth#installation-instructions---conda).\n",
+ "\n",
+ "You will learn how to do [data prep](#Data), how to [train](#Train), how to [run the model](#Inference), & [how to save it](#Save) (e.g. for Llama.cpp).\n",
+ "\n",
+ "This notebook uses the `ChatML` format for conversation style finetunes. The original Unsloth example uses [Open Assistant conversations](https://huggingface.co/datasets/philschmid/guanaco-sharegpt-style) in ShareGPT style; this version instead trains on a local Kaggle group-chat dataset (see the Data Prep section)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "4c970fa0",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T00:08:52.578683Z",
+ "iopub.status.busy": "2024-03-28T00:08:52.577956Z",
+ "iopub.status.idle": "2024-03-28T00:12:44.149130Z",
+ "shell.execute_reply": "2024-03-28T00:12:44.147764Z"
+ },
+ "id": "2eSvM9zX_2d3",
+ "papermill": {
+ "duration": 231.609576,
+ "end_time": "2024-03-28T00:12:44.151750",
+ "exception": false,
+ "start_time": "2024-03-28T00:08:52.542174",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Looking in indexes: https://download.pytorch.org/whl/cu121\r\n",
+ "Collecting xformers\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/xformers-0.0.25-cp310-cp310-manylinux2014_x86_64.whl (222.5 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m222.5/222.5 MB\u001b[0m \u001b[31m7.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hRequirement already satisfied: numpy in /opt/conda/lib/python3.10/site-packages (from xformers) (1.26.4)\r\n",
+ "Collecting torch==2.2.1 (from xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/torch-2.2.1%2Bcu121-cp310-cp310-linux_x86_64.whl (757.3 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m757.3/757.3 MB\u001b[0m \u001b[31m2.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hRequirement already satisfied: filelock in /opt/conda/lib/python3.10/site-packages (from torch==2.2.1->xformers) (3.13.1)\r\n",
+ "Requirement already satisfied: typing-extensions>=4.8.0 in /opt/conda/lib/python3.10/site-packages (from torch==2.2.1->xformers) (4.9.0)\r\n",
+ "Requirement already satisfied: sympy in /opt/conda/lib/python3.10/site-packages (from torch==2.2.1->xformers) (1.12)\r\n",
+ "Requirement already satisfied: networkx in /opt/conda/lib/python3.10/site-packages (from torch==2.2.1->xformers) (3.2.1)\r\n",
+ "Requirement already satisfied: jinja2 in /opt/conda/lib/python3.10/site-packages (from torch==2.2.1->xformers) (3.1.2)\r\n",
+ "Requirement already satisfied: fsspec in /opt/conda/lib/python3.10/site-packages (from torch==2.2.1->xformers) (2024.3.0)\r\n",
+ "Collecting nvidia-cuda-nvrtc-cu12==12.1.105 (from torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (23.7 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m23.7/23.7 MB\u001b[0m \u001b[31m26.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hCollecting nvidia-cuda-runtime-cu12==12.1.105 (from torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (823 kB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m823.6/823.6 kB\u001b[0m \u001b[31m40.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hCollecting nvidia-cuda-cupti-cu12==12.1.105 (from torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (14.1 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m14.1/14.1 MB\u001b[0m \u001b[31m84.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hCollecting nvidia-cudnn-cu12==8.9.2.26 (from torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl (731.7 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m731.7/731.7 MB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hCollecting nvidia-cublas-cu12==12.1.3.1 (from torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl (410.6 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m410.6/410.6 MB\u001b[0m \u001b[31m1.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hCollecting nvidia-cufft-cu12==11.0.2.54 (from torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl (121.6 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m121.6/121.6 MB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hCollecting nvidia-curand-cu12==10.3.2.106 (from torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl (56.5 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m56.5/56.5 MB\u001b[0m \u001b[31m28.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hCollecting nvidia-cusolver-cu12==11.4.5.107 (from torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl (124.2 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m124.2/124.2 MB\u001b[0m \u001b[31m12.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hCollecting nvidia-cusparse-cu12==12.1.0.106 (from torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl (196.0 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m196.0/196.0 MB\u001b[0m \u001b[31m8.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hCollecting nvidia-nccl-cu12==2.19.3 (from torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/nvidia_nccl_cu12-2.19.3-py3-none-manylinux1_x86_64.whl (166.0 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m166.0/166.0 MB\u001b[0m \u001b[31m9.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hCollecting nvidia-nvtx-cu12==12.1.105 (from torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (99 kB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m99.1/99.1 kB\u001b[0m \u001b[31m6.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hCollecting triton==2.2.0 (from torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/triton-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (167.9 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m167.9/167.9 MB\u001b[0m \u001b[31m8.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hCollecting nvidia-nvjitlink-cu12 (from nvidia-cusolver-cu12==11.4.5.107->torch==2.2.1->xformers)\r\n",
+ " Downloading https://download.pytorch.org/whl/cu121/nvidia_nvjitlink_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (19.8 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m19.8/19.8 MB\u001b[0m \u001b[31m46.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hRequirement already satisfied: MarkupSafe>=2.0 in /opt/conda/lib/python3.10/site-packages (from jinja2->torch==2.2.1->xformers) (2.1.3)\r\n",
+ "Requirement already satisfied: mpmath>=0.19 in /opt/conda/lib/python3.10/site-packages (from sympy->torch==2.2.1->xformers) (1.3.0)\r\n",
+ "Installing collected packages: triton, nvidia-nvtx-cu12, nvidia-nvjitlink-cu12, nvidia-nccl-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, nvidia-cusparse-cu12, nvidia-cudnn-cu12, nvidia-cusolver-cu12, torch, xformers\r\n",
+ " Attempting uninstall: torch\r\n",
+ " Found existing installation: torch 2.1.2\r\n",
+ " Uninstalling torch-2.1.2:\r\n",
+ " Successfully uninstalled torch-2.1.2\r\n",
+ "Successfully installed nvidia-cublas-cu12-12.1.3.1 nvidia-cuda-cupti-cu12-12.1.105 nvidia-cuda-nvrtc-cu12-12.1.105 nvidia-cuda-runtime-cu12-12.1.105 nvidia-cudnn-cu12-8.9.2.26 nvidia-cufft-cu12-11.0.2.54 nvidia-curand-cu12-10.3.2.106 nvidia-cusolver-cu12-11.4.5.107 nvidia-cusparse-cu12-12.1.0.106 nvidia-nccl-cu12-2.19.3 nvidia-nvjitlink-cu12-12.1.105 nvidia-nvtx-cu12-12.1.105 torch-2.2.1+cu121 triton-2.2.0 xformers-0.0.25\r\n",
+ "Collecting unsloth@ git+https://github.com/unslothai/unsloth.git (from unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git)\r\n",
+ " Cloning https://github.com/unslothai/unsloth.git to /tmp/pip-install-yfpl0t85/unsloth_63990d9a4b8e4d6ca74bbfccdc6198cb\r\n",
+ " Running command git clone --filter=blob:none --quiet https://github.com/unslothai/unsloth.git /tmp/pip-install-yfpl0t85/unsloth_63990d9a4b8e4d6ca74bbfccdc6198cb\r\n",
+ " Resolved https://github.com/unslothai/unsloth.git to commit a68aebc1fa17755ffbcdafc9239e7ca37ab21657\r\n",
+ " Installing build dependencies ... \u001b[?25l-\b \b\\\b \b|\b \b/\b \b-\b \b\\\b \b|\b \b/\b \b-\b \bdone\r\n",
+ "\u001b[?25h Getting requirements to build wheel ... \u001b[?25l-\b \b\\\b \bdone\r\n",
+ "\u001b[?25h Installing backend dependencies ... \u001b[?25l-\b \b\\\b \b|\b \b/\b \bdone\r\n",
+ "\u001b[?25h Preparing metadata (pyproject.toml) ... \u001b[?25l-\b \b\\\b \bdone\r\n",
+ "\u001b[?25hRequirement already satisfied: triton in /opt/conda/lib/python3.10/site-packages (2.2.0)\r\n",
+ "Collecting datasets==2.17.1\r\n",
+ " Downloading datasets-2.17.1-py3-none-any.whl.metadata (20 kB)\r\n",
+ "Requirement already satisfied: filelock in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (3.13.1)\r\n",
+ "Requirement already satisfied: numpy>=1.17 in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (1.26.4)\r\n",
+ "Collecting pyarrow>=12.0.0 (from datasets==2.17.1)\r\n",
+ " Downloading pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl.metadata (3.0 kB)\r\n",
+ "Collecting pyarrow-hotfix (from datasets==2.17.1)\r\n",
+ " Downloading pyarrow_hotfix-0.6-py3-none-any.whl.metadata (3.6 kB)\r\n",
+ "Requirement already satisfied: dill<0.3.9,>=0.3.0 in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (0.3.8)\r\n",
+ "Requirement already satisfied: pandas in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (2.1.4)\r\n",
+ "Requirement already satisfied: requests>=2.19.0 in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (2.31.0)\r\n",
+ "Requirement already satisfied: tqdm>=4.62.1 in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (4.66.1)\r\n",
+ "Requirement already satisfied: xxhash in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (3.4.1)\r\n",
+ "Requirement already satisfied: multiprocess in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (0.70.16)\r\n",
+ "Collecting fsspec<=2023.10.0,>=2023.1.0 (from fsspec[http]<=2023.10.0,>=2023.1.0->datasets==2.17.1)\r\n",
+ " Downloading fsspec-2023.10.0-py3-none-any.whl.metadata (6.8 kB)\r\n",
+ "Requirement already satisfied: aiohttp in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (3.9.1)\r\n",
+ "Requirement already satisfied: huggingface-hub>=0.19.4 in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (0.21.4)\r\n",
+ "Requirement already satisfied: packaging in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (21.3)\r\n",
+ "Requirement already satisfied: pyyaml>=5.1 in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (6.0.1)\r\n",
+ "Collecting bitsandbytes (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git)\r\n",
+ " Downloading bitsandbytes-0.43.0-py3-none-manylinux_2_24_x86_64.whl.metadata (1.8 kB)\r\n",
+ "Requirement already satisfied: attrs>=17.3.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets==2.17.1) (23.2.0)\r\n",
+ "Requirement already satisfied: multidict<7.0,>=4.5 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets==2.17.1) (6.0.4)\r\n",
+ "Requirement already satisfied: yarl<2.0,>=1.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets==2.17.1) (1.9.3)\r\n",
+ "Requirement already satisfied: frozenlist>=1.1.1 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets==2.17.1) (1.4.1)\r\n",
+ "Requirement already satisfied: aiosignal>=1.1.2 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets==2.17.1) (1.3.1)\r\n",
+ "Requirement already satisfied: async-timeout<5.0,>=4.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets==2.17.1) (4.0.3)\r\n",
+ "Requirement already satisfied: typing-extensions>=3.7.4.3 in /opt/conda/lib/python3.10/site-packages (from huggingface-hub>=0.19.4->datasets==2.17.1) (4.9.0)\r\n",
+ "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /opt/conda/lib/python3.10/site-packages (from packaging->datasets==2.17.1) (3.1.1)\r\n",
+ "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/conda/lib/python3.10/site-packages (from requests>=2.19.0->datasets==2.17.1) (3.3.2)\r\n",
+ "Requirement already satisfied: idna<4,>=2.5 in /opt/conda/lib/python3.10/site-packages (from requests>=2.19.0->datasets==2.17.1) (3.6)\r\n",
+ "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/conda/lib/python3.10/site-packages (from requests>=2.19.0->datasets==2.17.1) (1.26.18)\r\n",
+ "Requirement already satisfied: certifi>=2017.4.17 in /opt/conda/lib/python3.10/site-packages (from requests>=2.19.0->datasets==2.17.1) (2024.2.2)\r\n",
+ "Requirement already satisfied: torch in /opt/conda/lib/python3.10/site-packages (from bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (2.2.1+cu121)\r\n",
+ "Requirement already satisfied: python-dateutil>=2.8.2 in /opt/conda/lib/python3.10/site-packages (from pandas->datasets==2.17.1) (2.9.0.post0)\r\n",
+ "Requirement already satisfied: pytz>=2020.1 in /opt/conda/lib/python3.10/site-packages (from pandas->datasets==2.17.1) (2023.3.post1)\r\n",
+ "Requirement already satisfied: tzdata>=2022.1 in /opt/conda/lib/python3.10/site-packages (from pandas->datasets==2.17.1) (2023.4)\r\n",
+ "Collecting tyro (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git)\r\n",
+ " Downloading tyro-0.7.3-py3-none-any.whl.metadata (7.7 kB)\r\n",
+ "Requirement already satisfied: transformers>=4.38.2 in /opt/conda/lib/python3.10/site-packages (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (4.38.2)\r\n",
+ "Requirement already satisfied: sentencepiece in /opt/conda/lib/python3.10/site-packages (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.2.0)\r\n",
+ "Requirement already satisfied: psutil in /opt/conda/lib/python3.10/site-packages (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (5.9.3)\r\n",
+ "Requirement already satisfied: wheel>=0.42.0 in /opt/conda/lib/python3.10/site-packages (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.42.0)\r\n",
+ "Requirement already satisfied: accelerate>=0.26.1 in /opt/conda/lib/python3.10/site-packages (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.28.0)\r\n",
+ "Collecting trl>=0.7.9 (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git)\r\n",
+ " Downloading trl-0.8.1-py3-none-any.whl.metadata (11 kB)\r\n",
+ "Collecting peft>=0.7.1 (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git)\r\n",
+ " Downloading peft-0.10.0-py3-none-any.whl.metadata (13 kB)\r\n",
+ "Requirement already satisfied: safetensors>=0.3.1 in /opt/conda/lib/python3.10/site-packages (from accelerate>=0.26.1->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.4.2)\r\n",
+ "Requirement already satisfied: six>=1.5 in /opt/conda/lib/python3.10/site-packages (from python-dateutil>=2.8.2->pandas->datasets==2.17.1) (1.16.0)\r\n",
+ "Requirement already satisfied: sympy in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (1.12)\r\n",
+ "Requirement already satisfied: networkx in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (3.2.1)\r\n",
+ "Requirement already satisfied: jinja2 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (3.1.2)\r\n",
+ "Requirement already satisfied: nvidia-cuda-nvrtc-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.105)\r\n",
+ "Requirement already satisfied: nvidia-cuda-runtime-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.105)\r\n",
+ "Requirement already satisfied: nvidia-cuda-cupti-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.105)\r\n",
+ "Requirement already satisfied: nvidia-cudnn-cu12==8.9.2.26 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (8.9.2.26)\r\n",
+ "Requirement already satisfied: nvidia-cublas-cu12==12.1.3.1 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.3.1)\r\n",
+ "Requirement already satisfied: nvidia-cufft-cu12==11.0.2.54 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (11.0.2.54)\r\n",
+ "Requirement already satisfied: nvidia-curand-cu12==10.3.2.106 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (10.3.2.106)\r\n",
+ "Requirement already satisfied: nvidia-cusolver-cu12==11.4.5.107 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (11.4.5.107)\r\n",
+ "Requirement already satisfied: nvidia-cusparse-cu12==12.1.0.106 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.0.106)\r\n",
+ "Requirement already satisfied: nvidia-nccl-cu12==2.19.3 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (2.19.3)\r\n",
+ "Requirement already satisfied: nvidia-nvtx-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.105)\r\n",
+ "Requirement already satisfied: nvidia-nvjitlink-cu12 in /opt/conda/lib/python3.10/site-packages (from nvidia-cusolver-cu12==11.4.5.107->torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.105)\r\n",
+ "Requirement already satisfied: regex!=2019.12.17 in /opt/conda/lib/python3.10/site-packages (from transformers>=4.38.2->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (2023.12.25)\r\n",
+ "Requirement already satisfied: tokenizers<0.19,>=0.14 in /opt/conda/lib/python3.10/site-packages (from transformers>=4.38.2->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.15.2)\r\n",
+ "Requirement already satisfied: docstring-parser>=0.14.1 in /opt/conda/lib/python3.10/site-packages (from tyro->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.15)\r\n",
+ "Requirement already satisfied: rich>=11.1.0 in /opt/conda/lib/python3.10/site-packages (from tyro->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (13.7.0)\r\n",
+ "Collecting shtab>=1.5.6 (from tyro->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git)\r\n",
+ " Downloading shtab-1.7.1-py3-none-any.whl.metadata (7.3 kB)\r\n",
+ "Requirement already satisfied: markdown-it-py>=2.2.0 in /opt/conda/lib/python3.10/site-packages (from rich>=11.1.0->tyro->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (3.0.0)\r\n",
+ "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/conda/lib/python3.10/site-packages (from rich>=11.1.0->tyro->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (2.17.2)\r\n",
+ "Requirement already satisfied: MarkupSafe>=2.0 in /opt/conda/lib/python3.10/site-packages (from jinja2->torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (2.1.3)\r\n",
+ "Requirement already satisfied: mpmath>=0.19 in /opt/conda/lib/python3.10/site-packages (from sympy->torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (1.3.0)\r\n",
+ "Requirement already satisfied: mdurl~=0.1 in /opt/conda/lib/python3.10/site-packages (from markdown-it-py>=2.2.0->rich>=11.1.0->tyro->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.1.2)\r\n",
+ "Downloading datasets-2.17.1-py3-none-any.whl (536 kB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m536.7/536.7 kB\u001b[0m \u001b[31m4.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hDownloading fsspec-2023.10.0-py3-none-any.whl (166 kB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m166.4/166.4 kB\u001b[0m \u001b[31m10.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hDownloading pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl (38.3 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m38.3/38.3 MB\u001b[0m \u001b[31m36.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hDownloading bitsandbytes-0.43.0-py3-none-manylinux_2_24_x86_64.whl (102.2 MB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m102.2/102.2 MB\u001b[0m \u001b[31m11.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hDownloading pyarrow_hotfix-0.6-py3-none-any.whl (7.9 kB)\r\n",
+ "Downloading peft-0.10.0-py3-none-any.whl (199 kB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m199.1/199.1 kB\u001b[0m \u001b[31m14.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hDownloading trl-0.8.1-py3-none-any.whl (225 kB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m225.0/225.0 kB\u001b[0m \u001b[31m11.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hDownloading tyro-0.7.3-py3-none-any.whl (79 kB)\r\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m79.8/79.8 kB\u001b[0m \u001b[31m5.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n",
+ "\u001b[?25hDownloading shtab-1.7.1-py3-none-any.whl (14 kB)\r\n",
+ "Building wheels for collected packages: unsloth\r\n",
+ " Building wheel for unsloth (pyproject.toml) ... \u001b[?25l-\b \b\\\b \b|\b \bdone\r\n",
+ "\u001b[?25h Created wheel for unsloth: filename=unsloth-2024.3-py3-none-any.whl size=93934 sha256=34861411793a48098b4d9e04f35bc2ce841bfae25a980dd6ce151eecc1321a1a\r\n",
+ " Stored in directory: /tmp/pip-ephem-wheel-cache-6kf3ks_c/wheels/ed/d4/e9/76fb290ee3df0a5fc21ce5c2c788e29e9607a2353d8342fd0d\r\n",
+ "Successfully built unsloth\r\n",
+ "Installing collected packages: unsloth, shtab, pyarrow-hotfix, pyarrow, fsspec, tyro, datasets, bitsandbytes, trl, peft\r\n",
+ " Attempting uninstall: pyarrow\r\n",
+ " Found existing installation: pyarrow 11.0.0\r\n",
+ " Uninstalling pyarrow-11.0.0:\r\n",
+ " Successfully uninstalled pyarrow-11.0.0\r\n",
+ " Attempting uninstall: fsspec\r\n",
+ " Found existing installation: fsspec 2024.3.0\r\n",
+ " Uninstalling fsspec-2024.3.0:\r\n",
+ " Successfully uninstalled fsspec-2024.3.0\r\n",
+ " Attempting uninstall: datasets\r\n",
+ " Found existing installation: datasets 2.1.0\r\n",
+ " Uninstalling datasets-2.1.0:\r\n",
+ " Successfully uninstalled datasets-2.1.0\r\n",
+ "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\r\n",
+ "cudf 23.8.0 requires cubinlinker, which is not installed.\r\n",
+ "cudf 23.8.0 requires cupy-cuda11x>=12.0.0, which is not installed.\r\n",
+ "cudf 23.8.0 requires ptxcompiler, which is not installed.\r\n",
+ "cuml 23.8.0 requires cupy-cuda11x>=12.0.0, which is not installed.\r\n",
+ "dask-cudf 23.8.0 requires cupy-cuda11x>=12.0.0, which is not installed.\r\n",
+ "apache-beam 2.46.0 requires dill<0.3.2,>=0.3.1.1, but you have dill 0.3.8 which is incompatible.\r\n",
+ "apache-beam 2.46.0 requires numpy<1.25.0,>=1.14.3, but you have numpy 1.26.4 which is incompatible.\r\n",
+ "apache-beam 2.46.0 requires pyarrow<10.0.0,>=3.0.0, but you have pyarrow 15.0.2 which is incompatible.\r\n",
+ "beatrix-jupyterlab 2023.128.151533 requires jupyterlab~=3.6.0, but you have jupyterlab 4.1.5 which is incompatible.\r\n",
+ "cudf 23.8.0 requires cuda-python<12.0a0,>=11.7.1, but you have cuda-python 12.4.0 which is incompatible.\r\n",
+ "cudf 23.8.0 requires pandas<1.6.0dev0,>=1.3, but you have pandas 2.1.4 which is incompatible.\r\n",
+ "cudf 23.8.0 requires protobuf<5,>=4.21, but you have protobuf 3.20.3 which is incompatible.\r\n",
+ "cudf 23.8.0 requires pyarrow==11.*, but you have pyarrow 15.0.2 which is incompatible.\r\n",
+ "cuml 23.8.0 requires dask==2023.7.1, but you have dask 2024.3.1 which is incompatible.\r\n",
+ "dask-cuda 23.8.0 requires dask==2023.7.1, but you have dask 2024.3.1 which is incompatible.\r\n",
+ "dask-cuda 23.8.0 requires pandas<1.6.0dev0,>=1.3, but you have pandas 2.1.4 which is incompatible.\r\n",
+ "dask-cudf 23.8.0 requires dask==2023.7.1, but you have dask 2024.3.1 which is incompatible.\r\n",
+ "dask-cudf 23.8.0 requires pandas<1.6.0dev0,>=1.3, but you have pandas 2.1.4 which is incompatible.\r\n",
+ "distributed 2023.7.1 requires dask==2023.7.1, but you have dask 2024.3.1 which is incompatible.\r\n",
+ "gcsfs 2023.12.2.post1 requires fsspec==2023.12.2, but you have fsspec 2023.10.0 which is incompatible.\r\n",
+ "raft-dask 23.8.0 requires dask==2023.7.1, but you have dask 2024.3.1 which is incompatible.\r\n",
+ "s3fs 2024.3.0 requires fsspec==2024.3.0, but you have fsspec 2023.10.0 which is incompatible.\u001b[0m\u001b[31m\r\n",
+ "\u001b[0mSuccessfully installed bitsandbytes-0.43.0 datasets-2.17.1 fsspec-2023.10.0 peft-0.10.0 pyarrow-15.0.2 pyarrow-hotfix-0.6 shtab-1.7.1 trl-0.8.1 tyro-0.7.3 unsloth-2024.3\r\n"
+ ]
+ }
+ ],
+ "source": [
+ "#%%capture\n",
+ "#import torch\n",
+ "#major_version, minor_version = torch.cuda.get_device_capability()\n",
+ "\n",
+ "!pip install -U xformers --index-url https://download.pytorch.org/whl/cu121\n",
+ "!pip install \"unsloth[kaggle-new] @ git+https://github.com/unslothai/unsloth.git\" triton datasets==2.17.1\n",
+ "#if major_version >= 8:\n",
+ "# # Use this for new GPUs like Ampere, Hopper GPUs (RTX 30xx, RTX 40xx, A100, H100, L40)\n",
+ "# !pip install --no-deps packaging ninja einops flash-attn xformers trl peft accelerate bitsandbytes\n",
+ "#else:\n",
+ "# # Use this for older GPUs (V100, Tesla T4, RTX 20xx)\n",
+ "# !pip install --no-deps xformers trl peft accelerate bitsandbytes\n",
+ "\n",
+ "import os\n",
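+ "# Disable Weights & Biases logging; as the deprecation warning below notes, newer transformers versions prefer report_to=\"none\" in TrainingArguments\n",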
+ "os.environ[\"WANDB_DISABLED\"] = \"true\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "c963a9d2",
+ "metadata": {
+ "id": "r2v_X2fA0Df5",
+ "papermill": {
+ "duration": 0.123192,
+ "end_time": "2024-03-28T00:12:44.398766",
+ "exception": false,
+ "start_time": "2024-03-28T00:12:44.275574",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "* We support Llama, Mistral, CodeLlama, TinyLlama, Vicuna, Open Hermes, etc.\n",
+ "* And Yi, Qwen ([llamafied](https://huggingface.co/models?sort=trending&search=qwen+llama)), Deepseek, and all Llama- and Mistral-derived architectures.\n",
+ "* We support 16bit LoRA or 4bit QLoRA. Both are 2x faster.\n",
+ "* `max_seq_length` can be set to anything, since we do automatic RoPE Scaling via [kaiokendev's](https://kaiokendev.github.io/til) method.\n",
+ "* With [PR 26037](https://github.com/huggingface/transformers/pull/26037), we support downloading 4bit models **4x faster**! [Our repo](https://huggingface.co/unsloth) has Llama and Mistral 4bit models.\n",
+ "* [**NEW**] We make Gemma, Google's new model trained on 6 trillion tokens, **2.5x faster**! See our [Gemma notebook](https://colab.research.google.com/drive/10NbwlsRChbma1v55m8LAPYG15uQv6HLo?usp=sharing)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "88a40779",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T00:12:44.654372Z",
+ "iopub.status.busy": "2024-03-28T00:12:44.653667Z",
+ "iopub.status.idle": "2024-03-28T00:13:38.483179Z",
+ "shell.execute_reply": "2024-03-28T00:13:38.482338Z"
+ },
+ "id": "QmUBVEnvCDJv",
+ "outputId": "40383ec5-b379-4fcd-ba5c-b5656b0ff129",
+ "papermill": {
+ "duration": 53.95967,
+ "end_time": "2024-03-28T00:13:38.485764",
+ "exception": false,
+ "start_time": "2024-03-28T00:12:44.526094",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "58648d0ab785418089b24914c46df7a4",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "config.json: 0%| | 0.00/1.05k [00:00<?, ?B/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "==((====))== Unsloth: Fast Mistral patching release 2024.3\n",
+ " \\\\ /| GPU: Tesla T4. Max memory: 14.748 GB. Platform = Linux.\n",
+ "O^O/ \\_/ \\ Pytorch: 2.2.1+cu121. CUDA = 7.5. CUDA Toolkit = 12.1.\n",
+ "\\ / Bfloat16 = FALSE. Xformers = 0.0.25. FA = False.\n",
+ " \"-____-\" Free Apache license: http://github.com/unslothai/unsloth\n"
+ ]
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "0291f4d1f4734954946a71afef1e1519",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "model.safetensors: 0%| | 0.00/4.13G [00:00<?, ?B/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "944f3bb3fc0f4ef594424d1ed90f391b",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "generation_config.json: 0%| | 0.00/116 [00:00<?, ?B/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "9aeb5170934f4e7da9748e8859e40e30",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "tokenizer_config.json: 0%| | 0.00/971 [00:00<?, ?B/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "92da558bad0e4330a2b4b3041ed24aad",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "tokenizer.model: 0%| | 0.00/493k [00:00<?, ?B/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "59233cb5bb5849d0800cde9d3c129184",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "special_tokens_map.json: 0%| | 0.00/438 [00:00<?, ?B/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "84af480c5f6c4ad490074ef103af5628",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "tokenizer.json: 0%| | 0.00/1.80M [00:00<?, ?B/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2024-03-28 00:13:27.566798: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n",
+ "2024-03-28 00:13:27.566934: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n",
+ "2024-03-28 00:13:27.741422: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n"
+ ]
+ }
+ ],
+ "source": [
+ "from unsloth import FastLanguageModel\n",
+ "import torch\n",
+ "max_seq_length = 2048 # Choose any! We auto support RoPE Scaling internally!\n",
+ "dtype = None # None for auto detection. Float16 for Tesla T4, V100, Bfloat16 for Ampere+\n",
+ "load_in_4bit = True # Use 4bit quantization to reduce memory usage. Can be False.\n",
+ "\n",
+ "# 4bit pre quantized models we support for 4x faster downloading + no OOMs.\n",
+ "fourbit_models = [\n",
+ " \"unsloth/mistral-7b-bnb-4bit\",\n",
+ " \"unsloth/mistral-7b-instruct-v0.2-bnb-4bit\",\n",
+ " \"unsloth/llama-2-7b-bnb-4bit\",\n",
+ " \"unsloth/llama-2-13b-bnb-4bit\",\n",
+ " \"unsloth/codellama-34b-bnb-4bit\",\n",
+ " \"unsloth/tinyllama-bnb-4bit\",\n",
+ " \"unsloth/gemma-7b-bnb-4bit\", # Google's new model trained on 6 trillion tokens, 2.5x faster!\n",
+ " \"unsloth/gemma-2b-bnb-4bit\",\n",
+ "] # More models at https://huggingface.co/unsloth\n",
+ "\n",
+ "model, tokenizer = FastLanguageModel.from_pretrained(\n",
+ " model_name = \"unsloth/mistral-7b-bnb-4bit\", # Choose ANY! e.g. teknium/OpenHermes-2.5-Mistral-7B\n",
+ " max_seq_length = max_seq_length,\n",
+ " dtype = dtype,\n",
+ " load_in_4bit = load_in_4bit,\n",
+ " # token = \"hf_...\", # use one if using gated models like meta-llama/Llama-2-7b-hf\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "ca908244",
+ "metadata": {
+ "id": "SXd9bTZd1aaL",
+ "papermill": {
+ "duration": 0.12735,
+ "end_time": "2024-03-28T00:13:38.741441",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:38.614091",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "We now add LoRA adapters so we only need to update 1 to 10% of all parameters! A quick way to verify this is sketched below.\n",
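+ "\n",
+ "As a sanity check (a sketch, not part of the original notebook), you can print the trainable fraction after running the next cell:\n",
+ "\n",
+ "```python\n",
+ "# Count parameters with gradients enabled (the LoRA adapters) versus all parameters\n",
+ "trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
+ "total = sum(p.numel() for p in model.parameters())\n",
+ "print(f\"trainable: {trainable:,} / {total:,} = {trainable / total:.2%}\")\n",
+ "```"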
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "9a50c1ab",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T00:13:38.993225Z",
+ "iopub.status.busy": "2024-03-28T00:13:38.992478Z",
+ "iopub.status.idle": "2024-03-28T00:13:39.865675Z",
+ "shell.execute_reply": "2024-03-28T00:13:39.864586Z"
+ },
+ "id": "6bZsfBuZDeCL",
+ "outputId": "4c986b9b-ee42-48d6-ba35-6a709e919c82",
+ "papermill": {
+ "duration": 1.001126,
+ "end_time": "2024-03-28T00:13:39.869351",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:38.868225",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Unsloth 2024.3 patched 32 layers with 32 QKV layers, 32 O layers and 32 MLP layers.\n"
+ ]
+ }
+ ],
+ "source": [
+ "model = FastLanguageModel.get_peft_model(\n",
+ " model,\n",
+ " r = 16, # Choose any number > 0 ! Suggested 8, 16, 32, 64, 128\n",
+ " target_modules = [\"q_proj\", \"k_proj\", \"v_proj\", \"o_proj\",\n",
+ " \"gate_proj\", \"up_proj\", \"down_proj\",],\n",
+ " lora_alpha = 16,\n",
+ " lora_dropout = 0, # Supports any, but = 0 is optimized\n",
+ " bias = \"none\", # Supports any, but = \"none\" is optimized\n",
+ " use_gradient_checkpointing = True,\n",
+ " random_state = 3407,\n",
+ " use_rslora = False, # We support rank stabilized LoRA\n",
+ " loftq_config = None, # And LoftQ\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "0b7c4848",
+ "metadata": {
+ "id": "vITh0KVJ10qX",
+ "papermill": {
+ "duration": 0.124172,
+ "end_time": "2024-03-28T00:13:40.129776",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:40.005604",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "\n",
+ "### Data Prep\n",
+ "We use the `ChatML` format for conversation-style finetunes. The original example uses [Open Assistant conversations](https://huggingface.co/datasets/philschmid/guanaco-sharegpt-style) in ShareGPT style; here we load a local group-chat dataset instead. ChatML renders multi-turn conversations like this:\n",
+ "\n",
+ "```\n",
+ "<|im_start|>system\n",
+ "You are a helpful assistant.<|im_end|>\n",
+ "<|im_start|>user\n",
+ "What's the capital of France?<|im_end|>\n",
+ "<|im_start|>assistant\n",
+ "Paris.\n",
+ "```\n",
+ "\n",
+ "**[NOTE]** To train only on completions (ignoring the user's input) read TRL's docs [here](https://huggingface.co/docs/trl/sft_trainer#train-on-completions-only).\n",
+ "\n",
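+ "A minimal sketch of that approach, using TRL's `DataCollatorForCompletionOnlyLM` (the response template below assumes the ChatML formatting used in this notebook):\n",
+ "\n",
+ "```python\n",
+ "from trl import DataCollatorForCompletionOnlyLM\n",
+ "\n",
+ "# Compute loss only on assistant turns; everything before each response is masked out\n",
+ "collator = DataCollatorForCompletionOnlyLM(\"<|im_start|>assistant\\n\", tokenizer = tokenizer)\n",
+ "# Then pass data_collator = collator to SFTTrainer below (requires packing = False)\n",
+ "```\n",
+ "\n",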
+ "We use our `get_chat_template` function to get the correct chat template. We support `zephyr, chatml, mistral, llama, alpaca, vicuna, vicuna_old` and our own optimized `unsloth` template.\n",
+ "\n",
+ "Normally one has to train `<|im_start|>` and `<|im_end|>` as new tokens. We instead map `<|im_end|>` to the EOS token and leave `<|im_start|>` as is, so no additional tokens need to be trained.\n",
+ "\n",
+ "Note ShareGPT uses `{\"from\": \"human\", \"value\" : \"Hi\"}` and not `{\"role\": \"user\", \"content\" : \"Hi\"}`, so a `mapping` argument is provided to translate it. It is left commented out below because this dataset already uses the role/content form.\n",
+ "\n",
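+ "Once the cell below has patched the tokenizer, you can sanity-check the rendered text on a hypothetical conversation:\n",
+ "\n",
+ "```python\n",
+ "messages = [\n",
+ "    {\"role\": \"user\", \"content\": \"Hi\"},\n",
+ "    {\"role\": \"assistant\", \"content\": \"Hello!\"},\n",
+ "]\n",
+ "# Prints the ChatML-formatted training text, with <|im_end|> mapped onto the EOS token\n",
+ "print(tokenizer.apply_chat_template(messages, tokenize = False))\n",
+ "```\n",
+ "\n",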
+ "For text completions like novel writing, try this [notebook](https://colab.research.google.com/drive/1ef-tab5bhkvWmBOObepl1WgJvfvSzn5Q?usp=sharing)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "0d33d99d",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T00:13:40.375057Z",
+ "iopub.status.busy": "2024-03-28T00:13:40.374718Z",
+ "iopub.status.idle": "2024-03-28T00:13:41.633504Z",
+ "shell.execute_reply": "2024-03-28T00:13:41.632366Z"
+ },
+ "id": "LjY75GoYUCB8",
+ "outputId": "50c7b539-b750-4964-fa4a-45a99d5923f1",
+ "papermill": {
+ "duration": 1.382761,
+ "end_time": "2024-03-28T00:13:41.635817",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:40.253056",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Unsloth: Will map <|im_end|> to EOS = </s>.\n"
+ ]
+ }
+ ],
+ "source": [
+ "from datasets import load_dataset\n",
+ "import json\n",
+ "from unsloth.chat_templates import get_chat_template\n",
+ "\n",
+ "tokenizer = get_chat_template(\n",
+ " tokenizer,\n",
+ " chat_template = \"chatml\", # Supports zephyr, chatml, mistral, llama, alpaca, vicuna, vicuna_old, unsloth\n",
+ " #mapping = {\"role\" : \"from\", \"content\" : \"value\", \"user\" : \"human\", \"assistant\" : \"gpt\"}, # ShareGPT style\n",
+ " map_eos_token = True, # Maps <|im_end|> to </s> instead\n",
+ ")\n",
+ "\n",
+ "def formatting_prompts_func(convos):\n",
+ " texts = [tokenizer.apply_chat_template(convo, tokenize = False, add_generation_prompt = False) for convo in convos]\n",
+ " return { \"text\" : texts, }\n",
+ "\n",
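+ "# Assumes each line of the file is one JSON-encoded conversation (a list of {\"role\": ..., \"content\": ...} turns)\n",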
+ "with open(\"/kaggle/input/the-group-chat/output-10k-c.json\") as chatfile:\n",
+ " convos = [json.loads(j) for j in chatfile.readlines()]\n",
+ "\n",
+ "dataset = formatting_prompts_func(convos)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f75a3f33",
+ "metadata": {
+ "id": "cHiVoToneynS",
+ "papermill": {
+ "duration": 0.127199,
+ "end_time": "2024-03-28T00:13:41.890438",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:41.763239",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "Let's see how the `ChatML` format looks. The inspection code below is left commented out; uncomment it to print a formatted example (e.g. the 5th element)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "08ef098f",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T00:13:42.144988Z",
+ "iopub.status.busy": "2024-03-28T00:13:42.144281Z",
+ "iopub.status.idle": "2024-03-28T00:13:42.148878Z",
+ "shell.execute_reply": "2024-03-28T00:13:42.147833Z"
+ },
+ "id": "U5iEWrUkevpE",
+ "outputId": "e28b6889-29f9-400f-a08c-5fc7d5cbc5db",
+ "papermill": {
+ "duration": 0.133687,
+ "end_time": "2024-03-28T00:13:42.150735",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:42.017048",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "# Uncomment to inspect a formatted example:\n",
+ "#print(dataset[\"text\"][5])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "a77a6d20",
+ "metadata": {
+ "id": "GuKOAUDpUeDL",
+ "papermill": {
+ "duration": 0.121878,
+ "end_time": "2024-03-28T00:13:42.399195",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:42.277317",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "If you're looking to make your own chat template, that is also possible! You must use the Jinja templating syntax. We provide our own stripped-down version of the `Unsloth template`, which we find to be more efficient, and which leverages ChatML, Zephyr and Alpaca styles.\n",
+ "\n",
+ "More info on chat templates on [our wiki page!](https://github.com/unslothai/unsloth/wiki#chat-templates)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "id": "cdd24991",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T00:13:42.653294Z",
+ "iopub.status.busy": "2024-03-28T00:13:42.652894Z",
+ "iopub.status.idle": "2024-03-28T00:13:42.658835Z",
+ "shell.execute_reply": "2024-03-28T00:13:42.657902Z"
+ },
+ "id": "p31Z-S6FUieB",
+ "papermill": {
+ "duration": 0.136303,
+ "end_time": "2024-03-28T00:13:42.660931",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:42.524628",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "unsloth_template = \\\n",
+ " \"{{ bos_token }}\"\\\n",
+ " \"{{ 'You are a helpful assistant to the user\\n' }}\"\\\n",
+ " \"{% for message in messages %}\"\\\n",
+ " \"{% if message['role'] == 'user' %}\"\\\n",
+ " \"{{ '>>> User: ' + message['content'] + '\\n' }}\"\\\n",
+ " \"{% elif message['role'] == 'assistant' %}\"\\\n",
+ " \"{{ '>>> Assistant: ' + message['content'] + eos_token + '\\n' }}\"\\\n",
+ " \"{% endif %}\"\\\n",
+ " \"{% endfor %}\"\\\n",
+ " \"{% if add_generation_prompt %}\"\\\n",
+ " \"{{ '>>> Assistant: ' }}\"\\\n",
+ " \"{% endif %}\"\n",
+ "unsloth_eos_token = \"eos_token\"\n",
+ "\n",
+ "if False:\n",
+ " tokenizer = get_chat_template(\n",
+ " tokenizer,\n",
+ " chat_template = (unsloth_template, unsloth_eos_token,), # You must provide a template and EOS token\n",
+ " mapping = {\"role\" : \"from\", \"content\" : \"value\", \"user\" : \"human\", \"assistant\" : \"gpt\"}, # ShareGPT style\n",
+ " map_eos_token = True, # Maps <|im_end|> to </s> instead\n",
+ " )"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "44e5c271",
+ "metadata": {
+ "id": "idAEIeSQ3xdS",
+ "papermill": {
+ "duration": 0.127599,
+ "end_time": "2024-03-28T00:13:42.915115",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:42.787516",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "\n",
+ "### Train the model\n",
+ "Now let's use Huggingface TRL's `SFTTrainer`! More docs here: [TRL SFT docs](https://huggingface.co/docs/trl/sft_trainer). Unsloth's demo notebooks run only 60 steps to speed things up; here we instead set `num_train_epochs = 1` for a full pass over the data and leave `max_steps` unset. We also support TRL's `DPOTrainer`!\n",
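+ "\n",
+ "With the settings below, the effective batch size is 2 (per device) × 4 (gradient accumulation) = 8 sequences per optimizer step, so one epoch over the 10,000 examples takes 10,000 / 8 = 1,250 steps, matching the trainer printout further down."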
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "id": "84d94e51",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T00:13:43.163495Z",
+ "iopub.status.busy": "2024-03-28T00:13:43.162623Z",
+ "iopub.status.idle": "2024-03-28T00:13:43.243458Z",
+ "shell.execute_reply": "2024-03-28T00:13:43.242622Z"
+ },
+ "papermill": {
+ "duration": 0.20747,
+ "end_time": "2024-03-28T00:13:43.245965",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:43.038495",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "from datasets import Dataset\n",
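+ "# Wrap the plain {\"text\": [...]} dict in a datasets.Dataset so SFTTrainer can tokenize it via .map\n",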
+ "dataset = Dataset.from_dict(dataset)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "id": "099afa9e",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T00:13:43.492984Z",
+ "iopub.status.busy": "2024-03-28T00:13:43.492622Z",
+ "iopub.status.idle": "2024-03-28T00:13:48.324291Z",
+ "shell.execute_reply": "2024-03-28T00:13:48.323307Z"
+ },
+ "id": "95_Nn-89DhsL",
+ "outputId": "c13d3e90-5342-4535-9541-98f9120dfe2b",
+ "papermill": {
+ "duration": 4.95752,
+ "end_time": "2024-03-28T00:13:48.326701",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:43.369181",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Using the `WANDB_DISABLED` environment variable is deprecated and will be removed in v5. Use the --report_to flag to control the integrations used for logging result (for instance --report_to none).\n"
+ ]
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "e363d483a5134f5d873c11f936d2d9f5",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "Map (num_proc=2): 0%| | 0/10000 [00:00<?, ? examples/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from trl import SFTTrainer\n",
+ "from transformers import TrainingArguments\n",
+ "\n",
+ "trainer = SFTTrainer(\n",
+ " model = model,\n",
+ " tokenizer = tokenizer,\n",
+ " train_dataset = dataset,\n",
+ " dataset_text_field = \"text\",\n",
+ " max_seq_length = max_seq_length,\n",
+ " dataset_num_proc = 2,\n",
+ " packing = False, # Can make training 5x faster for short sequences.\n",
+ " args = TrainingArguments(\n",
+ " per_device_train_batch_size = 2,\n",
+ " gradient_accumulation_steps = 4,\n",
+ " warmup_steps = 5,\n",
+ " num_train_epochs=1,\n",
+ " learning_rate = 2e-4,\n",
+ " fp16 = not torch.cuda.is_bf16_supported(),\n",
+ " bf16 = torch.cuda.is_bf16_supported(),\n",
+ " logging_steps = 1,\n",
+ " optim = \"adamw_8bit\",\n",
+ " weight_decay = 0.01,\n",
+ " lr_scheduler_type = \"linear\",\n",
+ " seed = 3407,\n",
+ " output_dir = \"outputs\",\n",
+ " ),\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "id": "56281856",
+ "metadata": {
+ "cellView": "form",
+ "execution": {
+ "iopub.execute_input": "2024-03-28T00:13:48.575758Z",
+ "iopub.status.busy": "2024-03-28T00:13:48.575334Z",
+ "iopub.status.idle": "2024-03-28T00:13:48.582620Z",
+ "shell.execute_reply": "2024-03-28T00:13:48.581689Z"
+ },
+ "id": "2ejIt2xSNKKp",
+ "outputId": "a537db02-e673-44da-8889-5fa95a5e2d51",
+ "papermill": {
+ "duration": 0.137429,
+ "end_time": "2024-03-28T00:13:48.585471",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:48.448042",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "GPU = Tesla T4. Max memory = 14.748 GB.\n",
+ "4.5 GB of memory reserved.\n"
+ ]
+ }
+ ],
+ "source": [
+ "#@title Show current memory stats\n",
+ "gpu_stats = torch.cuda.get_device_properties(0)\n",
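+ "# Convert bytes reported by CUDA into GiB (divide by 1024**3) and round for display\n",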
+ "start_gpu_memory = round(torch.cuda.max_memory_reserved() / 1024 / 1024 / 1024, 3)\n",
+ "max_memory = round(gpu_stats.total_memory / 1024 / 1024 / 1024, 3)\n",
+ "print(f\"GPU = {gpu_stats.name}. Max memory = {max_memory} GB.\")\n",
+ "print(f\"{start_gpu_memory} GB of memory reserved.\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "id": "a4e1702c",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T00:13:48.854943Z",
+ "iopub.status.busy": "2024-03-28T00:13:48.854292Z",
+ "iopub.status.idle": "2024-03-28T03:52:49.428064Z",
+ "shell.execute_reply": "2024-03-28T03:52:49.427099Z"
+ },
+ "id": "yqxqAZ7KJ4oL",
+ "outputId": "db7bae40-bf0a-4908-8867-a5dfe933e1f3",
+ "papermill": {
+ "duration": 13140.716117,
+ "end_time": "2024-03-28T03:52:49.430510",
+ "exception": false,
+ "start_time": "2024-03-28T00:13:48.714393",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "==((====))== Unsloth - 2x faster free finetuning | Num GPUs = 1\n",
+ " \\\\ /| Num examples = 10,000 | Num Epochs = 1\n",
+ "O^O/ \\_/ \\ Batch size per device = 2 | Gradient Accumulation steps = 4\n",
+ "\\ / Total batch size = 8 | Total steps = 1,250\n",
+ " \"-____-\" Number of trainable parameters = 41,943,040\n"
+ ]
+ },
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "<div>\n",
+ "  [1250/1250 3:38:46, Epoch 1/1]\n",
+ "</div>\n",
+ "<table border=\"1\" class=\"dataframe\">\n",
+ "  <thead>\n",
+ "    <tr style=\"text-align: left;\">\n",
+ "      <th>Step</th>\n",
+ "      <th>Training Loss</th>\n",
+ "    </tr>\n",
+ "  </thead>\n",
+ "  <tbody>\n",
+ "    <tr><td>1</td><td>2.415600</td></tr>\n",
+ "    <tr><td>2</td><td>2.560600</td></tr>\n",
+ "    <tr><td>3</td><td>2.358100</td></tr>\n",
+ "    <tr><td>4</td><td>2.018800</td></tr>\n",
+ "    <tr><td>5</td><td>1.869800</td></tr>\n",
+ "    <tr><td>6</td><td>1.859900</td></tr>\n",
+ "    <tr><td>7</td><td>1.855700</td></tr>\n",
+ "    <tr><td>8</td><td>1.985000</td></tr>\n",
+ "    <tr><td>9</td><td>1.739100</td></tr>\n",
+ "    <tr><td>10</td><td>1.857900</td></tr>\n",
+ "    <tr><td>11</td><td>1.858300</td></tr>\n",
+ "    <tr><td>12</td><td>1.574900</td></tr>\n",
+ "    <tr><td>13</td><td>1.680000</td></tr>\n",
+ "    <tr><td>14</td><td>1.615100</td></tr>\n",
+ "    <tr><td>15</td><td>1.720000</td></tr>\n",
+ "    <tr><td>16</td><td>1.731600</td></tr>\n",
+ "    <tr><td>17</td><td>1.727100</td></tr>\n",
+ "    <tr><td>18</td><td>1.587100</td></tr>\n",
+ "    <tr><td>19</td><td>1.579300</td></tr>\n",
+ "    <tr><td>20</td><td>1.642300</td></tr>\n",
+ "    <tr><td>21</td><td>1.487200</td></tr>\n",
+ "    <tr><td>22</td><td>1.585400</td></tr>\n",
+ "    <tr><td>23</td><td>1.611900</td></tr>\n",
+ "    <tr><td>24</td><td>1.598700</td></tr>\n",
+ "    <tr><td>25</td><td>1.617600</td></tr>\n",
+ "    <tr><td>26</td><td>1.511700</td></tr>\n",
+ "    <tr><td>27</td><td>1.805500</td></tr>\n",
+ "    <tr><td>28</td><td>1.569000</td></tr>\n",
+ "    <tr><td>29</td><td>1.652700</td></tr>\n",
+ "    <tr><td>30</td><td>1.421700</td></tr>\n",
+ "    <tr><td>31</td><td>1.666500</td></tr>\n",
+ "    <tr><td>32</td><td>1.633400</td></tr>\n",
+ "    <tr><td>33</td><td>1.630900</td></tr>\n",
+ "    <tr><td>34</td><td>1.744100</td></tr>\n",
+ "    <tr><td>35</td><td>1.577500</td></tr>\n",
+ "    <tr><td>36</td><td>1.665400</td></tr>\n",
+ "    <tr><td>37</td><td>1.569500</td></tr>\n",
+ "    <tr><td>38</td><td>1.597500</td></tr>\n",
+ "    <tr><td>39</td><td>1.703800</td></tr>\n",
+ "    <tr><td>40</td><td>1.556500</td></tr>\n",
+ "    <tr><td>41</td><td>1.451800</td></tr>\n",
+ "    <tr><td>42</td><td>1.629500</td></tr>\n",
+ "    <tr><td>43</td><td>1.538500</td></tr>\n",
+ "    <tr><td>44</td><td>1.508600</td></tr>\n",
+ "    <tr><td>45</td><td>1.439400</td></tr>\n",
+ "    <tr><td>46</td><td>1.590000</td></tr>\n",
+ "    <tr><td>47</td><td>1.568200</td></tr>\n",
+ "    <tr><td>48</td><td>1.554900</td></tr>\n",
+ "    <tr><td>49</td><td>1.486900</td></tr>\n",
+ "    <tr><td>50</td><td>1.617100</td></tr>\n",
+ "    <tr><td>51</td><td>1.695700</td></tr>\n",
+ "    <tr><td>52</td><td>1.470600</td></tr>\n",
+ "    <tr><td>53</td><td>1.680400</td></tr>\n",
+ "    <tr><td>54</td><td>1.605500</td></tr>\n",
+ "    <tr><td>55</td><td>1.472900</td></tr>\n",
+ "    <tr><td>56</td><td>1.636600</td></tr>\n",
+ "    <tr><td>57</td><td>1.527600</td></tr>\n",
+ "    <tr><td>58</td><td>1.579300</td></tr>\n",
+ "    <tr><td>59</td><td>1.551700</td></tr>\n",
+ "    <tr><td>60</td><td>1.503900</td></tr>\n",
+ "    <tr><td>61</td><td>1.364500</td></tr>\n",
+ "    <tr><td>62</td><td>1.575300</td></tr>\n",
+ "    <tr><td>63</td><td>1.516700</td></tr>\n",
+ "    <tr><td>64</td><td>1.632000</td></tr>\n",
+ "    <tr><td>65</td><td>1.430900</td></tr>\n",
+ "    <tr><td>66</td><td>1.542000</td></tr>\n",
+ "    <tr><td>67</td><td>1.609800</td></tr>\n",
+ "    <tr><td>68</td><td>1.647700</td></tr>\n",
+ "    <tr><td>69</td><td>1.478100</td></tr>\n",
+ "    <tr><td>70</td><td>1.328200</td></tr>\n",
+ "    <tr><td>71</td><td>1.725000</td></tr>\n",
+ "    <tr><td>72</td><td>1.522400</td></tr>\n",
+ "    <tr><td>73</td><td>1.557200</td></tr>\n",
+ "    <tr><td>74</td><td>1.670000</td></tr>\n",
+ "    <tr><td>75</td><td>1.648900</td></tr>\n",
+ "    <tr><td>76</td><td>1.670400</td></tr>\n",
+ "    <tr><td>77</td><td>1.615300</td></tr>\n",
+ "    <tr><td>78</td><td>1.541800</td></tr>\n",
+ "    <tr><td>79</td><td>1.549200</td></tr>\n",
+ "    <tr><td>80</td><td>1.544500</td></tr>\n",
+ "    <tr><td>81</td><td>1.423300</td></tr>\n",
+ "    <tr><td>82</td><td>1.300900</td></tr>\n",
+ "    <tr><td>83</td><td>1.626600</td></tr>\n",
+ "    <tr><td>84</td><td>1.585000</td></tr>\n",
+ "    <tr><td>85</td><td>1.444500</td></tr>\n",
+ "    <tr><td>86</td><td>1.598200</td></tr>\n",
+ "    <tr><td>87</td><td>1.541000</td></tr>\n",
+ "    <tr><td>88</td><td>1.429500</td></tr>\n",
+ "    <tr><td>89</td><td>1.517300</td></tr>\n",
+ "    <tr><td>90</td><td>1.539100</td></tr>\n",
+ "    <tr><td>91</td><td>1.604200</td></tr>\n",
+ "    <tr><td>92</td><td>1.504300</td></tr>\n",
+ "    <tr><td>93</td><td>1.520200</td></tr>\n",
+ "    <tr><td>94</td><td>1.459000</td></tr>\n",
+ "    <tr><td>95</td><td>1.619900</td></tr>\n",
+ "    <tr><td>96</td><td>1.629000</td></tr>\n",
+ "    <tr><td>97</td><td>1.507000</td></tr>\n",
+ "    <tr><td>98</td><td>1.455300</td></tr>\n",
+ "    <tr><td>99</td><td>1.461700</td></tr>\n",
+ "    <tr><td>100</td><td>1.513500</td></tr>\n",
+ "    <tr><td>101</td><td>1.521500</td></tr>\n",
+ "    <tr><td>102</td><td>1.658100</td></tr>\n",
+ "    <tr><td>103</td><td>1.579500</td></tr>\n",
+ "    <tr><td>104</td><td>1.430100</td></tr>\n",
+ "    <tr><td>105</td><td>1.591500</td></tr>\n",
+ "    <tr><td>106</td><td>1.620900</td></tr>\n",
+ "    <tr><td>107</td><td>1.681300</td></tr>\n",
+ "    <tr><td>108</td><td>1.662900</td></tr>\n",
+ "    <tr><td>109</td><td>1.717200</td></tr>\n",
+ "    <tr><td>110</td><td>1.656000</td></tr>\n",
+ "    <tr><td>111</td><td>1.545400</td></tr>\n",
+ "    <tr><td>112</td><td>1.434400</td></tr>\n",
+ "    <tr><td>113</td><td>1.665900</td></tr>\n",
+ "    <tr><td>114</td><td>1.483000</td></tr>\n",
+ "    <tr><td>115</td><td>1.411300</td></tr>\n",
+ "    <tr><td>116</td><td>1.549000</td></tr>\n",
+ "    <tr><td>117</td><td>1.627200</td></tr>\n",
+ "    <tr><td>118</td><td>1.608600</td></tr>\n",
+ "    <tr><td>119</td><td>1.549700</td></tr>\n",
+ "    <tr><td>120</td><td>1.560800</td></tr>\n",
+ "    <tr><td>121</td><td>1.581400</td></tr>\n",
+ "    <tr><td>122</td><td>1.586100</td></tr>\n",
+ "    <tr><td>123</td><td>1.442700</td></tr>\n",
+ "    <tr><td>124</td><td>1.666800</td></tr>\n",
+ "    <tr><td>125</td><td>1.563900</td></tr>\n",
+ "    <tr><td>126</td><td>1.550300</td></tr>\n",
+ "    <tr><td>127</td><td>1.475600</td></tr>\n",
+ "    <tr><td>128</td><td>1.470400</td></tr>\n",
+ "    <tr><td>129</td><td>1.605000</td></tr>\n",
+ "    <tr><td>130</td><td>1.546100</td></tr>\n",
+ "    <tr><td>131</td><td>1.552900</td></tr>\n",
+ "    <tr><td>132</td><td>1.562300</td></tr>\n",
+ "    <tr><td>133</td><td>1.468900</td></tr>\n",
+ "    <tr><td>134</td><td>1.368200</td></tr>\n",
+ "    <tr><td>135</td><td>1.545800</td></tr>\n",
+ "    <tr><td>136</td><td>1.519900</td></tr>\n",
+ "    <tr><td>137</td><td>1.646300</td></tr>\n",
+ "    <tr><td>138</td><td>1.588800</td></tr>\n",
+ "    <tr><td>139</td><td>1.550300</td></tr>\n",
+ "    <tr><td>140</td><td>1.484800</td></tr>\n",
+ "    <tr><td>141</td><td>1.581600</td></tr>\n",
+ "    <tr><td>142</td><td>1.623200</td></tr>\n",
+ "    <tr><td>143</td><td>1.664700</td></tr>\n",
+ "    <tr><td>144</td><td>1.538800</td></tr>\n",
+ "    <tr><td>145</td><td>1.662800</td></tr>\n",
+ "    <tr><td>146</td><td>1.593500</td></tr>\n",
+ "    <tr><td>147</td><td>1.419500</td></tr>\n",
+ "    <tr><td>148</td><td>1.656200</td></tr>\n",
+ "    <tr><td>149</td><td>1.479400</td></tr>\n",
+ "    <tr><td>150</td><td>1.512500</td></tr>\n",
+ "    <tr><td>151</td><td>1.528800</td></tr>\n",
+ "    <tr><td>152</td><td>1.500800</td></tr>\n",
+ "    <tr><td>153</td><td>1.597800</td></tr>\n",
+ "    <tr><td>154</td><td>1.548600</td></tr>\n",
+ "    <tr><td>155</td><td>1.626200</td></tr>\n",
+ "    <tr><td>156</td><td>1.633400</td></tr>\n",
+ "    <tr><td>157</td><td>1.536100</td></tr>\n",
+ "    <tr><td>158</td><td>1.535300</td></tr>\n",
+ "    <tr><td>159</td><td>1.571300</td></tr>\n",
+ "    <tr><td>160</td><td>1.461200</td></tr>\n",
+ "    <tr><td>161</td><td>1.516200</td></tr>\n",
+ "    <tr><td>162</td><td>1.465500</td></tr>\n",
+ "    <tr><td>163</td><td>1.563900</td></tr>\n",
+ "    <tr><td>164</td><td>1.599900</td></tr>\n",
+ "
\n",
+ " \n",
+ " 165 | \n",
+ " 1.494400 | \n",
+ "
\n",
+ " \n",
+ " 166 | \n",
+ " 1.550500 | \n",
+ "
\n",
+ " \n",
+ " 167 | \n",
+ " 1.382100 | \n",
+ "
\n",
+ " \n",
+ " 168 | \n",
+ " 1.550800 | \n",
+ "
\n",
+ " \n",
+ " 169 | \n",
+ " 1.554000 | \n",
+ "
\n",
+ " \n",
+ " 170 | \n",
+ " 1.499200 | \n",
+ "
\n",
+ " \n",
+ " 171 | \n",
+ " 1.619500 | \n",
+ "
\n",
+ " \n",
+ " 172 | \n",
+ " 1.571800 | \n",
+ "
\n",
+ " \n",
+ " 173 | \n",
+ " 1.552700 | \n",
+ "
\n",
+ " \n",
+ " 174 | \n",
+ " 1.360500 | \n",
+ "
\n",
+ " \n",
+ " 175 | \n",
+ " 1.457600 | \n",
+ "
\n",
+ " \n",
+ " 176 | \n",
+ " 1.528500 | \n",
+ "
\n",
+ " \n",
+ " 177 | \n",
+ " 1.450600 | \n",
+ "
\n",
+ " \n",
+ " 178 | \n",
+ " 1.497100 | \n",
+ "
\n",
+ " \n",
+ " 179 | \n",
+ " 1.415400 | \n",
+ "
\n",
+ " \n",
+ " 180 | \n",
+ " 1.549900 | \n",
+ "
\n",
+ " \n",
+ " 181 | \n",
+ " 1.459800 | \n",
+ "
\n",
+ " \n",
+ " 182 | \n",
+ " 1.653100 | \n",
+ "
\n",
+ " \n",
+ " 183 | \n",
+ " 1.255300 | \n",
+ "
\n",
+ " \n",
+ " 184 | \n",
+ " 1.511100 | \n",
+ "
\n",
+ " \n",
+ " 185 | \n",
+ " 1.487700 | \n",
+ "
\n",
+ " \n",
+ " 186 | \n",
+ " 1.678500 | \n",
+ "
\n",
+ " \n",
+ " 187 | \n",
+ " 1.566400 | \n",
+ "
\n",
+ " \n",
+ " 188 | \n",
+ " 1.479300 | \n",
+ "
\n",
+ " \n",
+ " 189 | \n",
+ " 1.503900 | \n",
+ "
\n",
+ " \n",
+ " 190 | \n",
+ " 1.493700 | \n",
+ "
\n",
+ " \n",
+ " 191 | \n",
+ " 1.468400 | \n",
+ "
\n",
+ " \n",
+ " 192 | \n",
+ " 1.499400 | \n",
+ "
\n",
+ " \n",
+ " 193 | \n",
+ " 1.462300 | \n",
+ "
\n",
+ " \n",
+ " 194 | \n",
+ " 1.606200 | \n",
+ "
\n",
+ " \n",
+ " 195 | \n",
+ " 1.726000 | \n",
+ "
\n",
+ " \n",
+ " 196 | \n",
+ " 1.424700 | \n",
+ "
\n",
+ " \n",
+ " 197 | \n",
+ " 1.560500 | \n",
+ "
\n",
+ " \n",
+ " 198 | \n",
+ " 1.572200 | \n",
+ "
\n",
+ " \n",
+ " 199 | \n",
+ " 1.694600 | \n",
+ "
\n",
+ " \n",
+ " 200 | \n",
+ " 1.508900 | \n",
+ "
\n",
+ " \n",
+ " 201 | \n",
+ " 1.465600 | \n",
+ "
\n",
+ " \n",
+ " 202 | \n",
+ " 1.533500 | \n",
+ "
\n",
+ " \n",
+ " 203 | \n",
+ " 1.531400 | \n",
+ "
\n",
+ " \n",
+ " 204 | \n",
+ " 1.543200 | \n",
+ "
\n",
+ " \n",
+ " 205 | \n",
+ " 1.546500 | \n",
+ "
\n",
+ " \n",
+ " 206 | \n",
+ " 1.568600 | \n",
+ "
\n",
+ " \n",
+ " 207 | \n",
+ " 1.437200 | \n",
+ "
\n",
+ " \n",
+ " 208 | \n",
+ " 1.524100 | \n",
+ "
\n",
+ " \n",
+ " 209 | \n",
+ " 1.644300 | \n",
+ "
\n",
+ " \n",
+ " 210 | \n",
+ " 1.412500 | \n",
+ "
\n",
+ " \n",
+ " 211 | \n",
+ " 1.604700 | \n",
+ "
\n",
+ " \n",
+ " 212 | \n",
+ " 1.538300 | \n",
+ "
\n",
+ " \n",
+ " 213 | \n",
+ " 1.552600 | \n",
+ "
\n",
+ " \n",
+ " 214 | \n",
+ " 1.654100 | \n",
+ "
\n",
+ " \n",
+ " 215 | \n",
+ " 1.632300 | \n",
+ "
\n",
+ " \n",
+ " 216 | \n",
+ " 1.634200 | \n",
+ "
\n",
+ " \n",
+ " 217 | \n",
+ " 1.562400 | \n",
+ "
\n",
+ " \n",
+ " 218 | \n",
+ " 1.528000 | \n",
+ "
\n",
+ " \n",
+ " 219 | \n",
+ " 1.444400 | \n",
+ "
\n",
+ " \n",
+ " 220 | \n",
+ " 1.449800 | \n",
+ "
\n",
+ " \n",
+ " 221 | \n",
+ " 1.561900 | \n",
+ "
\n",
+ " \n",
+ " 222 | \n",
+ " 1.565400 | \n",
+ "
\n",
+ " \n",
+ " 223 | \n",
+ " 1.526800 | \n",
+ "
\n",
+ " \n",
+ " 224 | \n",
+ " 1.422900 | \n",
+ "
\n",
+ " \n",
+ " 225 | \n",
+ " 1.514200 | \n",
+ "
\n",
+ " \n",
+ " 226 | \n",
+ " 1.663700 | \n",
+ "
\n",
+ " \n",
+ " 227 | \n",
+ " 1.402100 | \n",
+ "
\n",
+ " \n",
+ " 228 | \n",
+ " 1.536400 | \n",
+ "
\n",
+ " \n",
+ " 229 | \n",
+ " 1.411200 | \n",
+ "
\n",
+ " \n",
+ " 230 | \n",
+ " 1.582300 | \n",
+ "
\n",
+ " \n",
+ " 231 | \n",
+ " 1.489300 | \n",
+ "
\n",
+ " \n",
+ " 232 | \n",
+ " 1.531800 | \n",
+ "
\n",
+ " \n",
+ " 233 | \n",
+ " 1.509500 | \n",
+ "
\n",
+ " \n",
+ " 234 | \n",
+ " 1.514100 | \n",
+ "
\n",
+ " \n",
+ " 235 | \n",
+ " 1.503800 | \n",
+ "
\n",
+ " \n",
+ " 236 | \n",
+ " 1.558800 | \n",
+ "
\n",
+ " \n",
+ " 237 | \n",
+ " 1.433500 | \n",
+ "
\n",
+ " \n",
+ " 238 | \n",
+ " 1.593100 | \n",
+ "
\n",
+ " \n",
+ " 239 | \n",
+ " 1.442500 | \n",
+ "
\n",
+ " \n",
+ " 240 | \n",
+ " 1.458900 | \n",
+ "
\n",
+ " \n",
+ " 241 | \n",
+ " 1.609300 | \n",
+ "
\n",
+ " \n",
+ " 242 | \n",
+ " 1.368500 | \n",
+ "
\n",
+ " \n",
+ " 243 | \n",
+ " 1.488500 | \n",
+ "
\n",
+ " \n",
+ " 244 | \n",
+ " 1.495500 | \n",
+ "
\n",
+ " \n",
+ " 245 | \n",
+ " 1.587800 | \n",
+ "
\n",
+ " \n",
+ " 246 | \n",
+ " 1.597700 | \n",
+ "
\n",
+ " \n",
+ " 247 | \n",
+ " 1.337800 | \n",
+ "
\n",
+ " \n",
+ " 248 | \n",
+ " 1.527200 | \n",
+ "
\n",
+ " \n",
+ " 249 | \n",
+ " 1.343900 | \n",
+ "
\n",
+ " \n",
+ " 250 | \n",
+ " 1.376000 | \n",
+ "
\n",
+ " \n",
+ " 251 | \n",
+ " 1.506100 | \n",
+ "
\n",
+ " \n",
+ " 252 | \n",
+ " 1.415800 | \n",
+ "
\n",
+ " \n",
+ " 253 | \n",
+ " 1.528500 | \n",
+ "
\n",
+ " \n",
+ " 254 | \n",
+ " 1.499300 | \n",
+ "
\n",
+ " \n",
+ " 255 | \n",
+ " 1.605400 | \n",
+ "
\n",
+ " \n",
+ " 256 | \n",
+ " 1.471000 | \n",
+ "
\n",
+ " \n",
+ " 257 | \n",
+ " 1.507400 | \n",
+ "
\n",
+ " \n",
+ " 258 | \n",
+ " 1.471800 | \n",
+ "
\n",
+ " \n",
+ " 259 | \n",
+ " 1.460100 | \n",
+ "
\n",
+ " \n",
+ " 260 | \n",
+ " 1.623500 | \n",
+ "
\n",
+ " \n",
+ " 261 | \n",
+ " 1.470000 | \n",
+ "
\n",
+ " \n",
+ " 262 | \n",
+ " 1.317300 | \n",
+ "
\n",
+ " \n",
+ " 263 | \n",
+ " 1.381800 | \n",
+ "
\n",
+ " \n",
+ " 264 | \n",
+ " 1.381500 | \n",
+ "
\n",
+ " \n",
+ " 265 | \n",
+ " 1.475200 | \n",
+ "
\n",
+ " \n",
+ " 266 | \n",
+ " 1.511700 | \n",
+ "
\n",
+ " \n",
+ " 267 | \n",
+ " 1.524100 | \n",
+ "
\n",
+ " \n",
+ " 268 | \n",
+ " 1.487300 | \n",
+ "
\n",
+ " \n",
+ " 269 | \n",
+ " 1.331600 | \n",
+ "
\n",
+ " \n",
+ " 270 | \n",
+ " 1.479500 | \n",
+ "
\n",
+ " \n",
+ " 271 | \n",
+ " 1.474400 | \n",
+ "
\n",
+ " \n",
+ " 272 | \n",
+ " 1.530400 | \n",
+ "
\n",
+ " \n",
+ " 273 | \n",
+ " 1.520800 | \n",
+ "
\n",
+ " \n",
+ " 274 | \n",
+ " 1.613700 | \n",
+ "
\n",
+ " \n",
+ " 275 | \n",
+ " 1.543800 | \n",
+ "
\n",
+ " \n",
+ " 276 | \n",
+ " 1.588600 | \n",
+ "
\n",
+ " \n",
+ " 277 | \n",
+ " 1.462600 | \n",
+ "
\n",
+ " \n",
+ " 278 | \n",
+ " 1.433200 | \n",
+ "
\n",
+ " \n",
+ " 279 | \n",
+ " 1.508600 | \n",
+ "
\n",
+ " \n",
+ " 280 | \n",
+ " 1.401300 | \n",
+ "
\n",
+ " \n",
+ " 281 | \n",
+ " 1.486700 | \n",
+ "
\n",
+ " \n",
+ " 282 | \n",
+ " 1.590800 | \n",
+ "
\n",
+ " \n",
+ " 283 | \n",
+ " 1.455800 | \n",
+ "
\n",
+ " \n",
+ " 284 | \n",
+ " 1.442800 | \n",
+ "
\n",
+ " \n",
+ " 285 | \n",
+ " 1.660000 | \n",
+ "
\n",
+ " \n",
+ " 286 | \n",
+ " 1.642900 | \n",
+ "
\n",
+ " \n",
+ " 287 | \n",
+ " 1.431400 | \n",
+ "
\n",
+ " \n",
+ " 288 | \n",
+ " 1.575100 | \n",
+ "
\n",
+ " \n",
+ " 289 | \n",
+ " 1.557800 | \n",
+ "
\n",
+ " \n",
+ " 290 | \n",
+ " 1.553200 | \n",
+ "
\n",
+ " \n",
+ " 291 | \n",
+ " 1.541500 | \n",
+ "
\n",
+ " \n",
+ " 292 | \n",
+ " 1.531600 | \n",
+ "
\n",
+ " \n",
+ " 293 | \n",
+ " 1.489800 | \n",
+ "
\n",
+ " \n",
+ " 294 | \n",
+ " 1.561100 | \n",
+ "
\n",
+ " \n",
+ " 295 | \n",
+ " 1.524400 | \n",
+ "
\n",
+ " \n",
+ " 296 | \n",
+ " 1.421400 | \n",
+ "
\n",
+ " \n",
+ " 297 | \n",
+ " 1.466800 | \n",
+ "
\n",
+ " \n",
+ " 298 | \n",
+ " 1.526200 | \n",
+ "
\n",
+ " \n",
+ " 299 | \n",
+ " 1.411400 | \n",
+ "
\n",
+ " \n",
+ " 300 | \n",
+ " 1.428100 | \n",
+ "
\n",
+ " \n",
+ " 301 | \n",
+ " 1.464500 | \n",
+ "
\n",
+ " \n",
+ " 302 | \n",
+ " 1.460000 | \n",
+ "
\n",
+ " \n",
+ " 303 | \n",
+ " 1.522700 | \n",
+ "
\n",
+ " \n",
+ " 304 | \n",
+ " 1.533100 | \n",
+ "
\n",
+ " \n",
+ " 305 | \n",
+ " 1.464400 | \n",
+ "
\n",
+ " \n",
+ " 306 | \n",
+ " 1.545100 | \n",
+ "
\n",
+ " \n",
+ " 307 | \n",
+ " 1.506800 | \n",
+ "
\n",
+ " \n",
+ " 308 | \n",
+ " 1.508500 | \n",
+ "
\n",
+ " \n",
+ " 309 | \n",
+ " 1.576900 | \n",
+ "
\n",
+ " \n",
+ " 310 | \n",
+ " 1.587500 | \n",
+ "
\n",
+ " \n",
+ " 311 | \n",
+ " 1.397800 | \n",
+ "
\n",
+ " \n",
+ " 312 | \n",
+ " 1.478100 | \n",
+ "
\n",
+ " \n",
+ " 313 | \n",
+ " 1.484200 | \n",
+ "
\n",
+ " \n",
+ " 314 | \n",
+ " 1.428500 | \n",
+ "
\n",
+ " \n",
+ " 315 | \n",
+ " 1.520700 | \n",
+ "
\n",
+ " \n",
+ " 316 | \n",
+ " 1.464100 | \n",
+ "
\n",
+ " \n",
+ " 317 | \n",
+ " 1.412200 | \n",
+ "
\n",
+ " \n",
+ " 318 | \n",
+ " 1.493000 | \n",
+ "
\n",
+ " \n",
+ " 319 | \n",
+ " 1.514200 | \n",
+ "
\n",
+ " \n",
+ " 320 | \n",
+ " 1.538200 | \n",
+ "
\n",
+ " \n",
+ " 321 | \n",
+ " 1.537100 | \n",
+ "
\n",
+ " \n",
+ " 322 | \n",
+ " 1.470500 | \n",
+ "
\n",
+ " \n",
+ " 323 | \n",
+ " 1.361800 | \n",
+ "
\n",
+ " \n",
+ " 324 | \n",
+ " 1.540100 | \n",
+ "
\n",
+ " \n",
+ " 325 | \n",
+ " 1.583800 | \n",
+ "
\n",
+ " \n",
+ " 326 | \n",
+ " 1.411400 | \n",
+ "
\n",
+ " \n",
+ " 327 | \n",
+ " 1.585000 | \n",
+ "
\n",
+ " \n",
+ " 328 | \n",
+ " 1.561200 | \n",
+ "
\n",
+ " \n",
+ " 329 | \n",
+ " 1.441400 | \n",
+ "
\n",
+ " \n",
+ " 330 | \n",
+ " 1.443100 | \n",
+ "
\n",
+ " \n",
+ " 331 | \n",
+ " 1.487900 | \n",
+ "
\n",
+ " \n",
+ " 332 | \n",
+ " 1.441400 | \n",
+ "
\n",
+ " \n",
+ " 333 | \n",
+ " 1.502100 | \n",
+ "
\n",
+ " \n",
+ " 334 | \n",
+ " 1.680100 | \n",
+ "
\n",
+ " \n",
+ " 335 | \n",
+ " 1.718200 | \n",
+ "
\n",
+ " \n",
+ " 336 | \n",
+ " 1.613200 | \n",
+ "
\n",
+ " \n",
+ " 337 | \n",
+ " 1.428600 | \n",
+ "
\n",
+ " \n",
+ " 338 | \n",
+ " 1.659800 | \n",
+ "
\n",
+ " \n",
+ " 339 | \n",
+ " 1.550100 | \n",
+ "
\n",
+ " \n",
+ " 340 | \n",
+ " 1.479900 | \n",
+ "
\n",
+ " \n",
+ " 341 | \n",
+ " 1.512500 | \n",
+ "
\n",
+ " \n",
+ " 342 | \n",
+ " 1.371900 | \n",
+ "
\n",
+ " \n",
+ " 343 | \n",
+ " 1.418200 | \n",
+ "
\n",
+ " \n",
+ " 344 | \n",
+ " 1.605200 | \n",
+ "
\n",
+ " \n",
+ " 345 | \n",
+ " 1.455900 | \n",
+ "
\n",
+ " \n",
+ " 346 | \n",
+ " 1.413300 | \n",
+ "
\n",
+ " \n",
+ " 347 | \n",
+ " 1.463400 | \n",
+ "
\n",
+ " \n",
+ " 348 | \n",
+ " 1.459700 | \n",
+ "
\n",
+ " \n",
+ " 349 | \n",
+ " 1.473400 | \n",
+ "
\n",
+ " \n",
+ " 350 | \n",
+ " 1.467900 | \n",
+ "
\n",
+ " \n",
+ " 351 | \n",
+ " 1.424800 | \n",
+ "
\n",
+ " \n",
+ " 352 | \n",
+ " 1.607200 | \n",
+ "
\n",
+ " \n",
+ " 353 | \n",
+ " 1.697500 | \n",
+ "
\n",
+ " \n",
+ " 354 | \n",
+ " 1.510900 | \n",
+ "
\n",
+ " \n",
+ " 355 | \n",
+ " 1.606700 | \n",
+ "
\n",
+ " \n",
+ " 356 | \n",
+ " 1.639400 | \n",
+ "
\n",
+ " \n",
+ " 357 | \n",
+ " 1.460200 | \n",
+ "
\n",
+ " \n",
+ " 358 | \n",
+ " 1.456100 | \n",
+ "
\n",
+ " \n",
+ " 359 | \n",
+ " 1.393600 | \n",
+ "
\n",
+ " \n",
+ " 360 | \n",
+ " 1.477500 | \n",
+ "
\n",
+ " \n",
+ " 361 | \n",
+ " 1.438100 | \n",
+ "
\n",
+ " \n",
+ " 362 | \n",
+ " 1.412900 | \n",
+ "
\n",
+ " \n",
+ " 363 | \n",
+ " 1.564800 | \n",
+ "
\n",
+ " \n",
+ " 364 | \n",
+ " 1.423000 | \n",
+ "
\n",
+ " \n",
+ " 365 | \n",
+ " 1.517000 | \n",
+ "
\n",
+ " \n",
+ " 366 | \n",
+ " 1.378000 | \n",
+ "
\n",
+ " \n",
+ " 367 | \n",
+ " 1.541300 | \n",
+ "
\n",
+ " \n",
+ " 368 | \n",
+ " 1.426400 | \n",
+ "
\n",
+ " \n",
+ " 369 | \n",
+ " 1.512400 | \n",
+ "
\n",
+ " \n",
+ " 370 | \n",
+ " 1.470800 | \n",
+ "
\n",
+ " \n",
+ " 371 | \n",
+ " 1.514200 | \n",
+ "
\n",
+ " \n",
+ " 372 | \n",
+ " 1.480300 | \n",
+ "
\n",
+ " \n",
+ " 373 | \n",
+ " 1.489100 | \n",
+ "
\n",
+ " \n",
+ " 374 | \n",
+ " 1.546200 | \n",
+ "
\n",
+ " \n",
+ " 375 | \n",
+ " 1.481200 | \n",
+ "
\n",
+ " \n",
+ " 376 | \n",
+ " 1.476000 | \n",
+ "
\n",
+ " \n",
+ " 377 | \n",
+ " 1.385400 | \n",
+ "
\n",
+ " \n",
+ " 378 | \n",
+ " 1.613200 | \n",
+ "
\n",
+ " \n",
+ " 379 | \n",
+ " 1.245500 | \n",
+ "
\n",
+ " \n",
+ " 380 | \n",
+ " 1.312100 | \n",
+ "
\n",
+ " \n",
+ " 381 | \n",
+ " 1.396700 | \n",
+ "
\n",
+ " \n",
+ " 382 | \n",
+ " 1.501400 | \n",
+ "
\n",
+ " \n",
+ " 383 | \n",
+ " 1.405100 | \n",
+ "
\n",
+ " \n",
+ " 384 | \n",
+ " 1.481700 | \n",
+ "
\n",
+ " \n",
+ " 385 | \n",
+ " 1.520400 | \n",
+ "
\n",
+ " \n",
+ " 386 | \n",
+ " 1.596300 | \n",
+ "
\n",
+ " \n",
+ " 387 | \n",
+ " 1.585500 | \n",
+ "
\n",
+ " \n",
+ " 388 | \n",
+ " 1.557700 | \n",
+ "
\n",
+ " \n",
+ " 389 | \n",
+ " 1.432000 | \n",
+ "
\n",
+ " \n",
+ " 390 | \n",
+ " 1.627200 | \n",
+ "
\n",
+ " \n",
+ " 391 | \n",
+ " 1.498900 | \n",
+ "
\n",
+ " \n",
+ " 392 | \n",
+ " 1.583700 | \n",
+ "
\n",
+ " \n",
+ " 393 | \n",
+ " 1.411800 | \n",
+ "
\n",
+ " \n",
+ " 394 | \n",
+ " 1.454600 | \n",
+ "
\n",
+ " \n",
+ " 395 | \n",
+ " 1.532200 | \n",
+ "
\n",
+ " \n",
+ " 396 | \n",
+ " 1.443000 | \n",
+ "
\n",
+ " \n",
+ " 397 | \n",
+ " 1.358000 | \n",
+ "
\n",
+ " \n",
+ " 398 | \n",
+ " 1.400200 | \n",
+ "
\n",
+ " \n",
+ " 399 | \n",
+ " 1.493300 | \n",
+ "
\n",
+ " \n",
+ " 400 | \n",
+ " 1.387900 | \n",
+ "
\n",
+ " \n",
+ " 401 | \n",
+ " 1.430900 | \n",
+ "
\n",
+ " \n",
+ " 402 | \n",
+ " 1.485400 | \n",
+ "
\n",
+ " \n",
+ " 403 | \n",
+ " 1.757100 | \n",
+ "
\n",
+ " \n",
+ " 404 | \n",
+ " 1.606100 | \n",
+ "
\n",
+ " \n",
+ " 405 | \n",
+ " 1.570100 | \n",
+ "
\n",
+ " \n",
+ " 406 | \n",
+ " 1.600700 | \n",
+ "
\n",
+ " \n",
+ " 407 | \n",
+ " 1.489300 | \n",
+ "
\n",
+ " \n",
+ " 408 | \n",
+ " 1.570900 | \n",
+ "
\n",
+ " \n",
+ " 409 | \n",
+ " 1.442300 | \n",
+ "
\n",
+ " \n",
+ " 410 | \n",
+ " 1.504900 | \n",
+ "
\n",
+ " \n",
+ " 411 | \n",
+ " 1.406900 | \n",
+ "
\n",
+ " \n",
+ " 412 | \n",
+ " 1.600600 | \n",
+ "
\n",
+ " \n",
+ " 413 | \n",
+ " 1.362500 | \n",
+ "
\n",
+ " \n",
+ " 414 | \n",
+ " 1.527700 | \n",
+ "
\n",
+ " \n",
+ " 415 | \n",
+ " 1.509400 | \n",
+ "
\n",
+ " \n",
+ " 416 | \n",
+ " 1.619800 | \n",
+ "
\n",
+ " \n",
+ " 417 | \n",
+ " 1.367200 | \n",
+ "
\n",
+ " \n",
+ " 418 | \n",
+ " 1.440800 | \n",
+ "
\n",
+ " \n",
+ " 419 | \n",
+ " 1.523200 | \n",
+ "
\n",
+ " \n",
+ " 420 | \n",
+ " 1.507500 | \n",
+ "
\n",
+ " \n",
+ " 421 | \n",
+ " 1.473100 | \n",
+ "
\n",
+ " \n",
+ " 422 | \n",
+ " 1.406900 | \n",
+ "
\n",
+ " \n",
+ " 423 | \n",
+ " 1.417000 | \n",
+ "
\n",
+ " \n",
+ " 424 | \n",
+ " 1.462700 | \n",
+ "
\n",
+ " \n",
+ " 425 | \n",
+ " 1.536800 | \n",
+ "
\n",
+ " \n",
+ " 426 | \n",
+ " 1.545300 | \n",
+ "
\n",
+ " \n",
+ " 427 | \n",
+ " 1.457400 | \n",
+ "
\n",
+ " \n",
+ " 428 | \n",
+ " 1.471200 | \n",
+ "
\n",
+ " \n",
+ " 429 | \n",
+ " 1.470500 | \n",
+ "
\n",
+ " \n",
+ " 430 | \n",
+ " 1.550000 | \n",
+ "
\n",
+ " \n",
+ " 431 | \n",
+ " 1.517700 | \n",
+ "
\n",
+ " \n",
+ " 432 | \n",
+ " 1.552500 | \n",
+ "
\n",
+ " \n",
+ " 433 | \n",
+ " 1.564900 | \n",
+ "
\n",
+ " \n",
+ " 434 | \n",
+ " 1.662400 | \n",
+ "
\n",
+ " \n",
+ " 435 | \n",
+ " 1.484900 | \n",
+ "
\n",
+ " \n",
+ " 436 | \n",
+ " 1.381200 | \n",
+ "
\n",
+ " \n",
+ " 437 | \n",
+ " 1.505900 | \n",
+ "
\n",
+ " \n",
+ " 438 | \n",
+ " 1.439100 | \n",
+ "
\n",
+ " \n",
+ " 439 | \n",
+ " 1.343900 | \n",
+ "
\n",
+ " \n",
+ " 440 | \n",
+ " 1.508700 | \n",
+ "
\n",
+ " \n",
+ " 441 | \n",
+ " 1.525400 | \n",
+ "
\n",
+ " \n",
+ " 442 | \n",
+ " 1.434000 | \n",
+ "
\n",
+ " \n",
+ " 443 | \n",
+ " 1.470400 | \n",
+ "
\n",
+ " \n",
+ " 444 | \n",
+ " 1.544200 | \n",
+ "
\n",
+ " \n",
+ " 445 | \n",
+ " 1.380300 | \n",
+ "
\n",
+ " \n",
+ " 446 | \n",
+ " 1.475500 | \n",
+ "
\n",
+ " \n",
+ " 447 | \n",
+ " 1.653600 | \n",
+ "
\n",
+ " \n",
+ " 448 | \n",
+ " 1.636300 | \n",
+ "
\n",
+ " \n",
+ " 449 | \n",
+ " 1.525200 | \n",
+ "
\n",
+ " \n",
+ " 450 | \n",
+ " 1.500500 | \n",
+ "
\n",
+ " \n",
+ " 451 | \n",
+ " 1.438000 | \n",
+ "
\n",
+ " \n",
+ " 452 | \n",
+ " 1.488800 | \n",
+ "
\n",
+ " \n",
+ " 453 | \n",
+ " 1.396300 | \n",
+ "
\n",
+ " \n",
+ " 454 | \n",
+ " 1.440200 | \n",
+ "
\n",
+ " \n",
+ " 455 | \n",
+ " 1.482000 | \n",
+ "
\n",
+ " \n",
+ " 456 | \n",
+ " 1.461400 | \n",
+ "
\n",
+ " \n",
+ " 457 | \n",
+ " 1.471400 | \n",
+ "
\n",
+ " \n",
+ " 458 | \n",
+ " 1.315300 | \n",
+ "
\n",
+ " \n",
+ " 459 | \n",
+ " 1.587200 | \n",
+ "
\n",
+ " \n",
+ " 460 | \n",
+ " 1.452000 | \n",
+ "
\n",
+ " \n",
+ " 461 | \n",
+ " 1.718700 | \n",
+ "
\n",
+ " \n",
+ " 462 | \n",
+ " 1.414400 | \n",
+ "
\n",
+ " \n",
+ " 463 | \n",
+ " 1.514500 | \n",
+ "
\n",
+ " \n",
+ " 464 | \n",
+ " 1.492100 | \n",
+ "
\n",
+ " \n",
+ " 465 | \n",
+ " 1.581400 | \n",
+ "
\n",
+ " \n",
+ " 466 | \n",
+ " 1.425000 | \n",
+ "
\n",
+ " \n",
+ " 467 | \n",
+ " 1.476900 | \n",
+ "
\n",
+ " \n",
+ " 468 | \n",
+ " 1.403700 | \n",
+ "
\n",
+ " \n",
+ " 469 | \n",
+ " 1.438700 | \n",
+ "
\n",
+ " \n",
+ " 470 | \n",
+ " 1.563300 | \n",
+ "
\n",
+ " \n",
+ " 471 | \n",
+ " 1.475600 | \n",
+ "
\n",
+ " \n",
+ " 472 | \n",
+ " 1.610700 | \n",
+ "
\n",
+ " \n",
+ " 473 | \n",
+ " 1.348700 | \n",
+ "
\n",
+ " \n",
+ " 474 | \n",
+ " 1.470000 | \n",
+ "
\n",
+ " \n",
+ " 475 | \n",
+ " 1.615400 | \n",
+ "
\n",
+ " \n",
+ " 476 | \n",
+ " 1.446700 | \n",
+ "
\n",
+ " \n",
+ " 477 | \n",
+ " 1.394500 | \n",
+ "
\n",
+ " \n",
+ " 478 | \n",
+ " 1.470600 | \n",
+ "
\n",
+ " \n",
+ " 479 | \n",
+ " 1.397700 | \n",
+ "
\n",
+ " \n",
+ " 480 | \n",
+ " 1.377500 | \n",
+ "
\n",
+ " \n",
+ " 481 | \n",
+ " 1.504900 | \n",
+ "
\n",
+ " \n",
+ " 482 | \n",
+ " 1.485500 | \n",
+ "
\n",
+ " \n",
+ " 483 | \n",
+ " 1.461600 | \n",
+ "
\n",
+ " \n",
+ " 484 | \n",
+ " 1.520600 | \n",
+ "
\n",
+ " \n",
+ " 485 | \n",
+ " 1.532300 | \n",
+ "
\n",
+ " \n",
+ " 486 | \n",
+ " 1.627200 | \n",
+ "
\n",
+ " \n",
+ " 487 | \n",
+ " 1.509800 | \n",
+ "
\n",
+ " \n",
+ " 488 | \n",
+ " 1.387400 | \n",
+ "
\n",
+ " \n",
+ " 489 | \n",
+ " 1.438900 | \n",
+ "
\n",
+ " \n",
+ " 490 | \n",
+ " 1.440700 | \n",
+ "
\n",
+ " \n",
+ " 491 | \n",
+ " 1.527900 | \n",
+ "
\n",
+ " \n",
+ " 492 | \n",
+ " 1.478900 | \n",
+ "
\n",
+ " \n",
+ " 493 | \n",
+ " 1.461900 | \n",
+ "
\n",
+ " \n",
+ " 494 | \n",
+ " 1.624800 | \n",
+ "
\n",
+ " \n",
+ " 495 | \n",
+ " 1.521600 | \n",
+ "
\n",
+ " \n",
+ " 496 | \n",
+ " 1.406800 | \n",
+ "
\n",
+ " \n",
+ " 497 | \n",
+ " 1.480600 | \n",
+ "
\n",
+ " \n",
+ " 498 | \n",
+ " 1.602300 | \n",
+ "
\n",
+ " \n",
+ " 499 | \n",
+ " 1.590400 | \n",
+ "
\n",
+ " \n",
+ " 500 | \n",
+ " 1.622000 | \n",
+ "
\n",
+ " \n",
+ " 501 | \n",
+ " 1.582400 | \n",
+ "
\n",
+ " \n",
+ " 502 | \n",
+ " 1.548000 | \n",
+ "
\n",
+ " \n",
+ " 503 | \n",
+ " 1.439800 | \n",
+ "
\n",
+ " \n",
+ " 504 | \n",
+ " 1.406300 | \n",
+ "
\n",
+ " \n",
+ " 505 | \n",
+ " 1.499700 | \n",
+ "
\n",
+ " \n",
+ " 506 | \n",
+ " 1.389400 | \n",
+ "
\n",
+ " \n",
+ " 507 | \n",
+ " 1.591000 | \n",
+ "
\n",
+ " \n",
+ " 508 | \n",
+ " 1.453000 | \n",
+ "
\n",
+ " \n",
+ " 509 | \n",
+ " 1.532200 | \n",
+ "
\n",
+ " \n",
+ " 510 | \n",
+ " 1.482900 | \n",
+ "
\n",
+ " \n",
+ " 511 | \n",
+ " 1.428800 | \n",
+ "
\n",
+ " \n",
+ " 512 | \n",
+ " 1.575800 | \n",
+ "
\n",
+ " \n",
+ " 513 | \n",
+ " 1.460300 | \n",
+ "
\n",
+ " \n",
+ " 514 | \n",
+ " 1.530200 | \n",
+ "
\n",
+ " \n",
+ " 515 | \n",
+ " 1.447100 | \n",
+ "
\n",
+ " \n",
+ " 516 | \n",
+ " 1.621300 | \n",
+ "
\n",
+ " \n",
+ " 517 | \n",
+ " 1.525500 | \n",
+ "
\n",
+ " \n",
+ " 518 | \n",
+ " 1.528700 | \n",
+ "
\n",
+ " \n",
+ " 519 | \n",
+ " 1.466200 | \n",
+ "
\n",
+ " \n",
+ " 520 | \n",
+ " 1.488700 | \n",
+ "
\n",
+ " \n",
+ " 521 | \n",
+ " 1.449400 | \n",
+ "
\n",
+ " \n",
+ " 522 | \n",
+ " 1.537600 | \n",
+ "
\n",
+ " \n",
+ " 523 | \n",
+ " 1.398400 | \n",
+ "
\n",
+ " \n",
+ " 524 | \n",
+ " 1.316700 | \n",
+ "
\n",
+ " \n",
+ " 525 | \n",
+ " 1.386100 | \n",
+ "
\n",
+ " \n",
+ " 526 | \n",
+ " 1.603900 | \n",
+ "
\n",
+ " \n",
+ " 527 | \n",
+ " 1.353800 | \n",
+ "
\n",
+ " \n",
+ " 528 | \n",
+ " 1.306700 | \n",
+ "
\n",
+ " \n",
+ " 529 | \n",
+ " 1.401600 | \n",
+ "
\n",
+ " \n",
+ " 530 | \n",
+ " 1.380400 | \n",
+ "
\n",
+ " \n",
+ " 531 | \n",
+ " 1.394900 | \n",
+ "
\n",
+ " \n",
+ " 532 | \n",
+ " 1.498300 | \n",
+ "
\n",
+ " \n",
+ " 533 | \n",
+ " 1.462200 | \n",
+ "
\n",
+ " \n",
+ " 534 | \n",
+ " 1.458100 | \n",
+ "
\n",
+ " \n",
+ " 535 | \n",
+ " 1.515000 | \n",
+ "
\n",
+ " \n",
+ " 536 | \n",
+ " 1.483900 | \n",
+ "
\n",
+ " \n",
+ " 537 | \n",
+ " 1.508600 | \n",
+ "
\n",
+ " \n",
+ " 538 | \n",
+ " 1.612800 | \n",
+ "
\n",
+ " \n",
+ " 539 | \n",
+ " 1.443400 | \n",
+ "
\n",
+ " \n",
+ " 540 | \n",
+ " 1.455600 | \n",
+ "
\n",
+ " \n",
+ " 541 | \n",
+ " 1.568900 | \n",
+ "
\n",
+ " \n",
+ " 542 | \n",
+ " 1.547600 | \n",
+ "
\n",
+ " \n",
+ " 543 | \n",
+ " 1.432400 | \n",
+ "
\n",
+ " \n",
+ " 544 | \n",
+ " 1.583800 | \n",
+ "
\n",
+ " \n",
+ " 545 | \n",
+ " 1.365600 | \n",
+ "
\n",
+ " \n",
+ " 546 | \n",
+ " 1.596500 | \n",
+ "
\n",
+ " \n",
+ " 547 | \n",
+ " 1.450600 | \n",
+ "
\n",
+ " \n",
+ " 548 | \n",
+ " 1.485400 | \n",
+ "
\n",
+ " \n",
+ " 549 | \n",
+ " 1.457700 | \n",
+ "
\n",
+ " \n",
+ " 550 | \n",
+ " 1.390200 | \n",
+ "
\n",
+ " \n",
+ " 551 | \n",
+ " 1.399700 | \n",
+ "
\n",
+ " \n",
+ " 552 | \n",
+ " 1.417600 | \n",
+ "
\n",
+ " \n",
+ " 553 | \n",
+ " 1.579800 | \n",
+ "
\n",
+ " \n",
+ " 554 | \n",
+ " 1.472400 | \n",
+ "
\n",
+ " \n",
+ " 555 | \n",
+ " 1.386100 | \n",
+ "
\n",
+ " \n",
+ " 556 | \n",
+ " 1.439000 | \n",
+ "
\n",
+ " \n",
+ " 557 | \n",
+ " 1.418300 | \n",
+ "
\n",
+ " \n",
+ " 558 | \n",
+ " 1.444300 | \n",
+ "
\n",
+ " \n",
+ " 559 | \n",
+ " 1.516500 | \n",
+ "
\n",
+ " \n",
+ " 560 | \n",
+ " 1.550100 | \n",
+ "
\n",
+ " \n",
+ " 561 | \n",
+ " 1.410800 | \n",
+ "
\n",
+ " \n",
+ " 562 | \n",
+ " 1.560600 | \n",
+ "
\n",
+ " \n",
+ " 563 | \n",
+ " 1.523800 | \n",
+ "
\n",
+ " \n",
+ " 564 | \n",
+ " 1.489200 | \n",
+ "
\n",
+ " \n",
+ " 565 | \n",
+ " 1.423400 | \n",
+ "
\n",
+ " \n",
+ " 566 | \n",
+ " 1.436900 | \n",
+ "
\n",
+ " \n",
+ " 567 | \n",
+ " 1.546700 | \n",
+ "
\n",
+ " \n",
+ " 568 | \n",
+ " 1.393200 | \n",
+ "
\n",
+ " \n",
+ " 569 | \n",
+ " 1.556600 | \n",
+ "
\n",
+ " \n",
+ " 570 | \n",
+ " 1.446700 | \n",
+ "
\n",
+ " \n",
+ " 571 | \n",
+ " 1.380600 | \n",
+ "
\n",
+ " \n",
+ " 572 | \n",
+ " 1.340500 | \n",
+ "
\n",
+ " \n",
+ " 573 | \n",
+ " 1.477000 | \n",
+ "
\n",
+ " \n",
+ " 574 | \n",
+ " 1.367000 | \n",
+ "
\n",
+ " \n",
+ " 575 | \n",
+ " 1.643500 | \n",
+ "
\n",
+ " \n",
+ " 576 | \n",
+ " 1.448600 | \n",
+ "
\n",
+ " \n",
+ " 577 | \n",
+ " 1.419600 | \n",
+ "
\n",
+ " \n",
+ " 578 | \n",
+ " 1.568400 | \n",
+ "
\n",
+ " \n",
+ " 579 | \n",
+ " 1.473300 | \n",
+ "
\n",
+ " \n",
+ " 580 | \n",
+ " 1.650400 | \n",
+ "
\n",
+ " \n",
+ " 581 | \n",
+ " 1.572000 | \n",
+ "
\n",
+ " \n",
+ " 582 | \n",
+ " 1.499300 | \n",
+ "
\n",
+ " \n",
+ " 583 | \n",
+ " 1.613200 | \n",
+ "
\n",
+ " \n",
+ " 584 | \n",
+ " 1.566500 | \n",
+ "
\n",
+ " \n",
+ " 585 | \n",
+ " 1.477800 | \n",
+ "
\n",
+ " \n",
+ " 586 | \n",
+ " 1.507300 | \n",
+ "
\n",
+ " \n",
+ " 587 | \n",
+ " 1.374800 | \n",
+ "
\n",
+ " \n",
+ " 588 | \n",
+ " 1.480100 | \n",
+ "
\n",
+ " \n",
+ " 589 | \n",
+ " 1.357000 | \n",
+ "
\n",
+ " \n",
+ " 590 | \n",
+ " 1.328300 | \n",
+ "
\n",
+ " \n",
+ " 591 | \n",
+ " 1.343400 | \n",
+ "
\n",
+ " \n",
+ " 592 | \n",
+ " 1.470600 | \n",
+ "
\n",
+ " \n",
+ " 593 | \n",
+ " 1.524700 | \n",
+ "
\n",
+ " \n",
+ " 594 | \n",
+ " 1.420600 | \n",
+ "
\n",
+ " \n",
+ " 595 | \n",
+ " 1.398400 | \n",
+ "
\n",
+ " \n",
+ " 596 | \n",
+ " 1.498600 | \n",
+ "
\n",
+ " \n",
+ " 597 | \n",
+ " 1.530700 | \n",
+ "
\n",
+ " \n",
+ " 598 | \n",
+ " 1.520700 | \n",
+ "
\n",
+ " \n",
+ " 599 | \n",
+ " 1.579800 | \n",
+ "
\n",
+ " \n",
+ " 600 | \n",
+ " 1.559300 | \n",
+ "
\n",
+ " \n",
+ " 601 | \n",
+ " 1.400800 | \n",
+ "
\n",
+ " \n",
+ " 602 | \n",
+ " 1.489000 | \n",
+ "
\n",
+ " \n",
+ " 603 | \n",
+ " 1.532900 | \n",
+ "
\n",
+ " \n",
+ " 604 | \n",
+ " 1.507300 | \n",
+ "
\n",
+ " \n",
+ " 605 | \n",
+ " 1.447400 | \n",
+ "
\n",
+ " \n",
+ " 606 | \n",
+ " 1.527100 | \n",
+ "
\n",
+ " \n",
+ " 607 | \n",
+ " 1.433700 | \n",
+ "
\n",
+ " \n",
+ " 608 | \n",
+ " 1.533300 | \n",
+ "
\n",
+ " \n",
+ " 609 | \n",
+ " 1.469300 | \n",
+ "
\n",
+ " \n",
+ " 610 | \n",
+ " 1.504100 | \n",
+ "
\n",
+ " \n",
+ " 611 | \n",
+ " 1.416300 | \n",
+ "
\n",
+ " \n",
+ " 612 | \n",
+ " 1.601600 | \n",
+ "
\n",
+ " \n",
+ " 613 | \n",
+ " 1.526500 | \n",
+ "
\n",
+ " \n",
+ " 614 | \n",
+ " 1.491200 | \n",
+ "
\n",
+ " \n",
+ " 615 | \n",
+ " 1.605900 | \n",
+ "
\n",
+ " \n",
+ " 616 | \n",
+ " 1.561700 | \n",
+ "
\n",
+ " \n",
+ " 617 | \n",
+ " 1.384500 | \n",
+ "
\n",
+ " \n",
+ " 618 | \n",
+ " 1.561900 | \n",
+ "
\n",
+ " \n",
+ " 619 | \n",
+ " 1.416700 | \n",
+ "
\n",
+ " \n",
+ " 620 | \n",
+ " 1.484600 | \n",
+ "
\n",
+ " \n",
+ " 621 | \n",
+ " 1.558600 | \n",
+ "
\n",
+ " \n",
+ " 622 | \n",
+ " 1.449400 | \n",
+ "
\n",
+ " \n",
+ " 623 | \n",
+ " 1.477200 | \n",
+ "
\n",
+ " \n",
+ " 624 | \n",
+ " 1.557600 | \n",
+ "
\n",
+ " \n",
+ " 625 | \n",
+ " 1.550600 | \n",
+ "
\n",
+ " \n",
+ " 626 | \n",
+ " 1.575000 | \n",
+ "
\n",
+ " \n",
+ " 627 | \n",
+ " 1.376900 | \n",
+ "
\n",
+ " \n",
+ " 628 | \n",
+ " 1.557200 | \n",
+ "
\n",
+ " \n",
+ " 629 | \n",
+ " 1.466200 | \n",
+ "
\n",
+ " \n",
+ " 630 | \n",
+ " 1.390700 | \n",
+ "
\n",
+ " \n",
+ " 631 | \n",
+ " 1.441400 | \n",
+ "
\n",
+ " \n",
+ " 632 | \n",
+ " 1.526600 | \n",
+ "
\n",
+ " \n",
+ " 633 | \n",
+ " 1.455400 | \n",
+ "
\n",
+ " \n",
+ " 634 | \n",
+ " 1.310500 | \n",
+ "
\n",
+ " \n",
+ " 635 | \n",
+ " 1.445300 | \n",
+ "
\n",
+ " \n",
+ " 636 | \n",
+ " 1.431300 | \n",
+ "
\n",
+ " \n",
+ " 637 | \n",
+ " 1.596800 | \n",
+ "
\n",
+ " \n",
+ " 638 | \n",
+ " 1.520600 | \n",
+ "
\n",
+ " \n",
+ " 639 | \n",
+ " 1.554900 | \n",
+ "
\n",
+ " \n",
+ " 640 | \n",
+ " 1.456100 | \n",
+ "
\n",
+ " \n",
+ " 641 | \n",
+ " 1.566200 | \n",
+ "
\n",
+ " \n",
+ " 642 | \n",
+ " 1.507100 | \n",
+ "
\n",
+ " \n",
+ " 643 | \n",
+ " 1.522700 | \n",
+ "
\n",
+ " \n",
+ " 644 | \n",
+ " 1.482700 | \n",
+ "
\n",
+ " \n",
+ " 645 | \n",
+ " 1.525900 | \n",
+ "
\n",
+ " \n",
+ " 646 | \n",
+ " 1.327800 | \n",
+ "
\n",
+ " \n",
+ " 647 | \n",
+ " 1.441400 | \n",
+ "
\n",
+ " \n",
+ " 648 | \n",
+ " 1.412400 | \n",
+ "
\n",
+ " \n",
+ " 649 | \n",
+ " 1.338100 | \n",
+ "
\n",
+ " \n",
+ " 650 | \n",
+ " 1.466900 | \n",
+ "
\n",
+ " \n",
+ " 651 | \n",
+ " 1.592100 | \n",
+ "
\n",
+ " \n",
+ " 652 | \n",
+ " 1.473300 | \n",
+ "
\n",
+ " \n",
+ " 653 | \n",
+ " 1.526600 | \n",
+ "
\n",
+ " \n",
+ " 654 | \n",
+ " 1.484900 | \n",
+ "
\n",
+ " \n",
+ " 655 | \n",
+ " 1.537900 | \n",
+ "
\n",
+ " \n",
+ " 656 | \n",
+ " 1.368500 | \n",
+ "
\n",
+ " \n",
+ " 657 | \n",
+ " 1.332000 | \n",
+ "
\n",
+ " \n",
+ " 658 | \n",
+ " 1.545500 | \n",
+ "
\n",
+ " \n",
+ " 659 | \n",
+ " 1.425000 | \n",
+ "
\n",
+ " \n",
+ " 660 | \n",
+ " 1.487300 | \n",
+ "
\n",
+ " \n",
+ " 661 | \n",
+ " 1.499200 | \n",
+ "
\n",
+ " \n",
+ " 662 | \n",
+ " 1.461900 | \n",
+ "
\n",
+ " \n",
+ " 663 | \n",
+ " 1.495800 | \n",
+ "
\n",
+ " \n",
+ " 664 | \n",
+ " 1.432700 | \n",
+ "
\n",
+ " \n",
+ " 665 | \n",
+ " 1.480300 | \n",
+ "
\n",
+ " \n",
+ " 666 | \n",
+ " 1.452000 | \n",
+ "
\n",
+ " \n",
+ " 667 | \n",
+ " 1.516700 | \n",
+ "
\n",
+ " \n",
+ " 668 | \n",
+ " 1.465200 | \n",
+ "
\n",
+ " \n",
+ " 669 | \n",
+ " 1.455800 | \n",
+ "
\n",
+ " \n",
+ " 670 | \n",
+ " 1.402400 | \n",
+ "
\n",
+ " \n",
+ " 671 | \n",
+ " 1.377000 | \n",
+ "
\n",
+ " \n",
+ " 672 | \n",
+ " 1.540900 | \n",
+ "
\n",
+ " \n",
+ " 673 | \n",
+ " 1.436500 | \n",
+ "
\n",
+ " \n",
+ " 674 | \n",
+ " 1.597800 | \n",
+ "
\n",
+ " \n",
+ " 675 | \n",
+ " 1.432400 | \n",
+ "
\n",
+ " \n",
+ " 676 | \n",
+ " 1.417700 | \n",
+ "
\n",
+ " \n",
+ " 677 | \n",
+ " 1.305100 | \n",
+ "
\n",
+ " \n",
+ " 678 | \n",
+ " 1.543400 | \n",
+ "
\n",
+ " \n",
+ " 679 | \n",
+ " 1.629200 | \n",
+ "
\n",
+ " \n",
+ " 680 | \n",
+ " 1.404100 | \n",
+ "
\n",
+ " \n",
+ " 681 | \n",
+ " 1.544200 | \n",
+ "
\n",
+ " \n",
+ " 682 | \n",
+ " 1.552600 | \n",
+ "
\n",
+ " \n",
+ " 683 | \n",
+ " 1.422000 | \n",
+ "
\n",
+ " \n",
+ " 684 | \n",
+ " 1.477900 | \n",
+ "
\n",
+ " \n",
+ " 685 | \n",
+ " 1.293200 | \n",
+ "
\n",
+ " \n",
+ " 686 | \n",
+ " 1.411200 | \n",
+ "
\n",
+ " \n",
+ " 687 | \n",
+ " 1.480900 | \n",
+ "
\n",
+ " \n",
+ " 688 | \n",
+ " 1.486800 | \n",
+ "
\n",
+ " \n",
+ " 689 | \n",
+ " 1.316400 | \n",
+ "
\n",
+ " \n",
+ " 690 | \n",
+ " 1.466900 | \n",
+ "
\n",
+ " \n",
+ " 691 | \n",
+ " 1.376700 | \n",
+ "
\n",
+ " \n",
+ " 692 | \n",
+ " 1.440000 | \n",
+ "
\n",
+ " \n",
+ " 693 | \n",
+ " 1.594300 | \n",
+ "
\n",
+ " \n",
+ " 694 | \n",
+ " 1.482100 | \n",
+ "
\n",
+ " \n",
+ " 695 | \n",
+ " 1.537500 | \n",
+ "
\n",
+ " \n",
+ " 696 | \n",
+ " 1.543200 | \n",
+ "
\n",
+ " \n",
+ " 697 | \n",
+ " 1.458800 | \n",
+ "
\n",
+ " \n",
+ " 698 | \n",
+ " 1.493900 | \n",
+ "
\n",
+ " \n",
+ " 699 | \n",
+ " 1.517100 | \n",
+ "
\n",
+ " \n",
+ " 700 | \n",
+ " 1.408600 | \n",
+ "
\n",
+ " \n",
+ " 701 | \n",
+ " 1.488700 | \n",
+ "
\n",
+ " \n",
+ " 702 | \n",
+ " 1.363300 | \n",
+ "
\n",
+ " \n",
+ " 703 | \n",
+ " 1.300900 | \n",
+ "
\n",
+ " \n",
+ " 704 | \n",
+ " 1.488000 | \n",
+ "
\n",
+ " \n",
+ " 705 | \n",
+ " 1.377400 | \n",
+ "
\n",
+ " \n",
+ " 706 | \n",
+ " 1.526500 | \n",
+ "
\n",
+ " \n",
+ " 707 | \n",
+ " 1.392900 | \n",
+ "
\n",
+ " \n",
+ " 708 | \n",
+ " 1.536000 | \n",
+ "
\n",
+ " \n",
+ " 709 | \n",
+ " 1.349900 | \n",
+ "
\n",
+ " \n",
+ " 710 | \n",
+ " 1.447300 | \n",
+ "
\n",
+ " \n",
+ " 711 | \n",
+ " 1.349600 | \n",
+ "
\n",
+ " \n",
+ " 712 | \n",
+ " 1.548100 | \n",
+ "
\n",
+ " \n",
+ " 713 | \n",
+ " 1.441000 | \n",
+ "
\n",
+ " \n",
+ " 714 | \n",
+ " 1.418200 | \n",
+ "
\n",
+ " \n",
+ " 715 | \n",
+ " 1.434100 | \n",
+ "
\n",
+ " \n",
+ " 716 | \n",
+ " 1.387700 | \n",
+ "
\n",
+ " \n",
+ " 717 | \n",
+ " 1.293200 | \n",
+ "
\n",
+ " \n",
+ " 718 | \n",
+ " 1.396800 | \n",
+ "
\n",
+ " \n",
+ " 719 | \n",
+ " 1.430700 | \n",
+ "
\n",
+ " \n",
+ " 720 | \n",
+ " 1.363800 | \n",
+ "
\n",
+ " \n",
+ " 721 | \n",
+ " 1.471500 | \n",
+ "
\n",
+ " \n",
+ " 722 | \n",
+ " 1.502400 | \n",
+ "
\n",
+ " \n",
+ " 723 | \n",
+ " 1.394000 | \n",
+ "
\n",
+ " \n",
+ " 724 | \n",
+ " 1.339500 | \n",
+ "
\n",
+ " \n",
+ " 725 | \n",
+ " 1.478800 | \n",
+ "
\n",
+ " \n",
+ " 726 | \n",
+ " 1.554500 | \n",
+ "
\n",
+ " \n",
+ " 727 | \n",
+ " 1.355800 | \n",
+ "
\n",
+ " \n",
+ " 728 | \n",
+ " 1.422100 | \n",
+ "
\n",
+ " \n",
+ " 729 | \n",
+ " 1.487600 | \n",
+ "
\n",
+ " \n",
+ " 730 | \n",
+ " 1.425300 | \n",
+ "
\n",
+ " \n",
+ " 731 | \n",
+ " 1.429600 | \n",
+ "
\n",
+ " \n",
+ " 732 | \n",
+ " 1.440100 | \n",
+ "
\n",
+ " \n",
+ " 733 | \n",
+ " 1.484700 | \n",
+ "
\n",
+ " \n",
+ " 734 | \n",
+ " 1.588300 | \n",
+ "
\n",
+ " \n",
+ " 735 | \n",
+ " 1.428800 | \n",
+ "
\n",
+ " \n",
+ " 736 | \n",
+ " 1.510200 | \n",
+ "
\n",
+ " \n",
+ " 737 | \n",
+ " 1.418300 | \n",
+ "
\n",
+ " \n",
+ " 738 | \n",
+ " 1.461400 | \n",
+ "
\n",
+ " \n",
+ " 739 | \n",
+ " 1.455600 | \n",
+ "
\n",
+ " \n",
+ " 740 | \n",
+ " 1.377100 | \n",
+ "
\n",
+ " \n",
+ " 741 | \n",
+ " 1.382400 | \n",
+ "
\n",
+ " \n",
+ " 742 | \n",
+ " 1.520200 | \n",
+ "
\n",
+ " \n",
+ " 743 | \n",
+ " 1.383200 | \n",
+ "
\n",
+ " \n",
+ " 744 | \n",
+ " 1.494000 | \n",
+ "
\n",
+ " \n",
+ " 745 | \n",
+ " 1.567400 | \n",
+ "
\n",
+ " \n",
+ " 746 | \n",
+ " 1.437000 | \n",
+ "
\n",
+ " \n",
+ " 747 | \n",
+ " 1.458000 | \n",
+ "
\n",
+ " \n",
+ " 748 | \n",
+ " 1.483100 | \n",
+ "
\n",
+ " \n",
+ " 749 | \n",
+ " 1.473700 | \n",
+ "
\n",
+ " \n",
+ " 750 | \n",
+ " 1.644300 | \n",
+ "
\n",
+ " \n",
+ " 751 | \n",
+ " 1.348900 | \n",
+ "
\n",
+ " \n",
+ " 752 | \n",
+ " 1.442800 | \n",
+ "
\n",
+ " \n",
+ " 753 | \n",
+ " 1.616400 | \n",
+ "
\n",
+ " \n",
+ " 754 | \n",
+ " 1.459600 | \n",
+ "
\n",
+ " \n",
+ " 755 | \n",
+ " 1.478100 | \n",
+ "
\n",
+ " \n",
+ " 756 | \n",
+ " 1.469500 | \n",
+ "
\n",
+ " \n",
+ " 757 | \n",
+ " 1.510300 | \n",
+ "
\n",
+ " \n",
+ " 758 | \n",
+ " 1.402400 | \n",
+ "
\n",
+ " \n",
+ " 759 | \n",
+ " 1.477400 | \n",
+ "
\n",
+ " \n",
+ " 760 | \n",
+ " 1.597400 | \n",
+ "
\n",
+ " \n",
+ " 761 | \n",
+ " 1.470700 | \n",
+ "
\n",
+ " \n",
+ " 762 | \n",
+ " 1.586600 | \n",
+ "
\n",
+ " \n",
+ " 763 | \n",
+ " 1.316800 | \n",
+ "
\n",
+ " \n",
+ " 764 | \n",
+ " 1.298600 | \n",
+ "
\n",
+ " \n",
+ " 765 | \n",
+ " 1.482500 | \n",
+ "
\n",
+ " \n",
+ " 766 | \n",
+ " 1.544300 | \n",
+ "
\n",
+ " \n",
+ " 767 | \n",
+ " 1.396300 | \n",
+ "
\n",
+ " \n",
+ " 768 | \n",
+ " 1.321000 | \n",
+ "
\n",
+ " \n",
+ " 769 | \n",
+ " 1.424400 | \n",
+ "
\n",
+ " \n",
+ " 770 | \n",
+ " 1.449300 | \n",
+ "
\n",
+ " \n",
+ " 771 | \n",
+ " 1.479900 | \n",
+ "
\n",
+ " \n",
+ " 772 | \n",
+ " 1.451300 | \n",
+ "
\n",
+ " \n",
+ " 773 | \n",
+ " 1.567600 | \n",
+ "
\n",
+ " \n",
+ " 774 | \n",
+ " 1.257600 | \n",
+ "
\n",
+ " \n",
+ " 775 | \n",
+ " 1.649800 | \n",
+ "
\n",
+ " \n",
+ " 776 | \n",
+ " 1.516400 | \n",
+ "
\n",
+ " \n",
+ " 777 | \n",
+ " 1.461400 | \n",
+ "
\n",
+ " \n",
+ " 778 | \n",
+ " 1.494800 | \n",
+ "
\n",
+ " \n",
+ " 779 | \n",
+ " 1.621100 | \n",
+ "
\n",
+ " \n",
+ " 780 | \n",
+ " 1.571900 | \n",
+ "
\n",
+ " \n",
+ " 781 | \n",
+ " 1.331500 | \n",
+ "
\n",
+ " \n",
+ " 782 | \n",
+ " 1.575500 | \n",
+ "
\n",
+ " \n",
+ " 783 | \n",
+ " 1.439000 | \n",
+ "
\n",
+ " \n",
+ " 784 | \n",
+ " 1.347600 | \n",
+ "
\n",
+ " \n",
+ " 785 | \n",
+ " 1.522800 | \n",
+ "
\n",
+ " \n",
+ " 786 | \n",
+ " 1.584100 | \n",
+ "
\n",
+ " \n",
+ " 787 | \n",
+ " 1.419300 | \n",
+ "
\n",
+ " \n",
+ " 788 | \n",
+ " 1.385400 | \n",
+ "
\n",
+ " \n",
+ " 789 | \n",
+ " 1.435000 | \n",
+ "
\n",
+ " \n",
+ " 790 | \n",
+ " 1.483800 | \n",
+ "
\n",
+ " \n",
+ " 791 | \n",
+ " 1.452200 | \n",
+ "
\n",
+ " \n",
+ " 792 | \n",
+ " 1.587100 | \n",
+ "
\n",
+ " \n",
+ " 793 | \n",
+ " 1.495600 | \n",
+ "
\n",
+ " \n",
+ " 794 | \n",
+ " 1.485100 | \n",
+ "
\n",
+ " \n",
+ " 795 | \n",
+ " 1.444100 | \n",
+ "
\n",
+ " \n",
+ " 796 | \n",
+ " 1.534800 | \n",
+ "
\n",
+ " \n",
+ " 797 | \n",
+ " 1.436100 | \n",
+ "
\n",
+ " \n",
+ " 798 | \n",
+ " 1.366400 | \n",
+ "
\n",
+ " \n",
+ " 799 | \n",
+ " 1.603100 | \n",
+ "
\n",
+ " \n",
+ " 800 | \n",
+ " 1.505600 | \n",
+ "
\n",
+ " \n",
+ " 801 | \n",
+ " 1.484300 | \n",
+ "
\n",
+ " \n",
+ " 802 | \n",
+ " 1.353700 | \n",
+ "
\n",
+ " \n",
+ " 803 | \n",
+ " 1.462300 | \n",
+ "
\n",
+ " \n",
+ " 804 | \n",
+ " 1.497700 | \n",
+ "
\n",
+ " \n",
+ " 805 | \n",
+ " 1.448300 | \n",
+ "
\n",
+ " \n",
+ " 806 | \n",
+ " 1.388900 | \n",
+ "
\n",
+ " \n",
+ " 807 | \n",
+ " 1.440900 | \n",
+ "
\n",
+ " \n",
+ " 808 | \n",
+ " 1.437100 | \n",
+ "
\n",
+ " \n",
+ " 809 | \n",
+ " 1.527300 | \n",
+ "
\n",
+ " \n",
+ " 810 | \n",
+ " 1.497200 | \n",
+ "
\n",
+ " \n",
+ " 811 | \n",
+ " 1.482200 | \n",
+ "
\n",
+ " \n",
+ " 812 | \n",
+ " 1.361200 | \n",
+ "
\n",
+ " \n",
+ " 813 | \n",
+ " 1.435700 | \n",
+ "
\n",
+ " \n",
+ " 814 | \n",
+ " 1.463700 | \n",
+ "
\n",
+ " \n",
+ " 815 | \n",
+ " 1.478700 | \n",
+ "
\n",
+ " \n",
+ " 816 | \n",
+ " 1.523100 | \n",
+ "
\n",
+ " \n",
+ " 817 | \n",
+ " 1.560500 | \n",
+ "
\n",
+ " \n",
+ " 818 | \n",
+ " 1.457100 | \n",
+ "
\n",
+ " \n",
+ " 819 | \n",
+ " 1.477400 | \n",
+ "
\n",
+ " \n",
+ " 820 | \n",
+ " 1.558200 | \n",
+ "
\n",
+ " \n",
+ " 821 | \n",
+ " 1.424400 | \n",
+ "
\n",
+ " \n",
+ " 822 | \n",
+ " 1.578200 | \n",
+ "
\n",
+ " \n",
+ " 823 | \n",
+ " 1.465400 | \n",
+ "
\n",
+ " \n",
+ " 824 | \n",
+ " 1.343500 | \n",
+ "
\n",
+ " \n",
+ " 825 | \n",
+ " 1.405000 | \n",
+ "
\n",
+ " \n",
+ " 826 | \n",
+ " 1.476500 | \n",
+ "
\n",
+ " \n",
+ " 827 | \n",
+ " 1.458900 | \n",
+ "
\n",
+ " \n",
+ " 828 | \n",
+ " 1.458300 | \n",
+ "
\n",
+ " \n",
+ " 829 | \n",
+ " 1.497900 | \n",
+ "
\n",
+ " \n",
+ " 830 | \n",
+ " 1.436900 | \n",
+ "
\n",
+ " \n",
+ " 831 | \n",
+ " 1.575000 | \n",
+ "
\n",
+ " \n",
+ " 832 | \n",
+ " 1.531200 | \n",
+ "
\n",
+ " \n",
+ " 833 | \n",
+ " 1.490700 | \n",
+ "
\n",
+ " \n",
+ " 834 | \n",
+ " 1.556900 | \n",
+ "
\n",
+ " \n",
+ " 835 | \n",
+ " 1.620300 | \n",
+ "
\n",
+ " \n",
+ " 836 | \n",
+ " 1.563400 | \n",
+ "
\n",
+ " \n",
+ " 837 | \n",
+ " 1.436300 | \n",
+ "
\n",
+ " \n",
+ " 838 | \n",
+ " 1.465600 | \n",
+ "
\n",
+ " \n",
+ " 839 | \n",
+ " 1.412700 | \n",
+ "
\n",
+ " \n",
+ " 840 | \n",
+ " 1.487900 | \n",
+ "
\n",
+ " \n",
+ " 841 | \n",
+ " 1.506800 | \n",
+ "
\n",
+ " \n",
+ " 842 | \n",
+ " 1.427100 | \n",
+ "
\n",
+ " \n",
+ " 843 | \n",
+ " 1.376300 | \n",
+ "
\n",
+ " \n",
+ " 844 | \n",
+ " 1.500300 | \n",
+ "
\n",
+ " \n",
+ " 845 | \n",
+ " 1.573100 | \n",
+ "
\n",
+ " \n",
+ " 846 | \n",
+ " 1.443300 | \n",
+ "
\n",
+ " \n",
+ " 847 | \n",
+ " 1.476400 | \n",
+ "
\n",
+ " \n",
+ " 848 | \n",
+ " 1.497100 | \n",
+ "
\n",
+ " \n",
+ " 849 | \n",
+ " 1.310600 | \n",
+ "
\n",
+ " \n",
+ " 850 | \n",
+ " 1.404200 | \n",
+ "
\n",
+ " \n",
+ " 851 | \n",
+ " 1.575800 | \n",
+ "
\n",
+ " \n",
+ " 852 | \n",
+ " 1.506100 | \n",
+ "
\n",
+ " \n",
+ " 853 | \n",
+ " 1.424900 | \n",
+ "
\n",
+ " \n",
+ " 854 | \n",
+ " 1.522100 | \n",
+ "
\n",
+ " \n",
+ " 855 | \n",
+ " 1.376900 | \n",
+ "
\n",
+ " \n",
+ " 856 | \n",
+ " 1.476000 | \n",
+ "
\n",
+ " \n",
+ " 857 | \n",
+ " 1.339700 | \n",
+ "
\n",
+ " \n",
+ " 858 | \n",
+ " 1.440300 | \n",
+ "
\n",
+ " \n",
+ " 859 | \n",
+ " 1.518100 | \n",
+ "
\n",
+ " \n",
+ " 860 | \n",
+ " 1.411400 | \n",
+ "
\n",
+ " \n",
+ " 861 | \n",
+ " 1.394900 | \n",
+ "
\n",
+ " \n",
+ " 862 | \n",
+ " 1.522100 | \n",
+ "
\n",
+ " \n",
+ " 863 | \n",
+ " 1.436000 | \n",
+ "
\n",
+ " \n",
+ " 864 | \n",
+ " 1.585100 | \n",
+ "
\n",
+ " \n",
+ " 865 | \n",
+ " 1.490100 | \n",
+ "
\n",
+ " \n",
+ " 866 | \n",
+ " 1.472400 | \n",
+ "
\n",
+ " \n",
+ " 867 | \n",
+ " 1.299200 | \n",
+ "
\n",
+ " \n",
+ " 868 | \n",
+ " 1.422200 | \n",
+ "
\n",
+ " \n",
+ " 869 | \n",
+ " 1.487800 | \n",
+ "
\n",
+ " \n",
+ " 870 | \n",
+ " 1.623900 | \n",
+ "
\n",
+ " \n",
+ " 871 | \n",
+ " 1.605000 | \n",
+ "
\n",
+ " \n",
+ " 872 | \n",
+ " 1.580400 | \n",
+ "
\n",
+ " \n",
+ " 873 | \n",
+ " 1.275400 | \n",
+ "
\n",
+ " \n",
+ " 874 | \n",
+ " 1.452700 | \n",
+ "
\n",
+ " \n",
+ " 875 | \n",
+ " 1.400200 | \n",
+ "
\n",
+ " \n",
+ " 876 | \n",
+ " 1.473500 | \n",
+ "
\n",
+ " \n",
+ " 877 | \n",
+ " 1.359500 | \n",
+ "
\n",
+ " \n",
+ " 878 | \n",
+ " 1.495800 | \n",
+ "
\n",
+ " \n",
+ " 879 | \n",
+ " 1.451500 | \n",
+ "
\n",
+ " \n",
+ " 880 | \n",
+ " 1.420400 | \n",
+ "
\n",
+ " \n",
+ " 881 | \n",
+ " 1.528400 | \n",
+ "
\n",
+ " \n",
+ " 882 | \n",
+ " 1.397800 | \n",
+ "
\n",
+ " \n",
+ " 883 | \n",
+ " 1.597900 | \n",
+ "
\n",
+ " \n",
+ " 884 | \n",
+ " 1.509000 | \n",
+ "
\n",
+ " \n",
+ " 885 | \n",
+ " 1.568300 | \n",
+ "
\n",
+ " \n",
+ " 886 | \n",
+ " 1.473000 | \n",
+ "
\n",
+ " \n",
+ " 887 | \n",
+ " 1.553900 | \n",
+ "
\n",
+ " \n",
+ " 888 | \n",
+ " 1.588500 | \n",
+ "
\n",
+ " \n",
+ " 889 | \n",
+ " 1.442500 | \n",
+ "
\n",
+ " \n",
+ " 890 | \n",
+ " 1.415100 | \n",
+ "
\n",
+ " \n",
+ " 891 | \n",
+ " 1.357400 | \n",
+ "
\n",
+ " \n",
+ " 892 | \n",
+ " 1.311900 | \n",
+ "
\n",
+ " \n",
+ " 893 | \n",
+ " 1.405100 | \n",
+ "
\n",
+ " \n",
+ " 894 | \n",
+ " 1.464700 | \n",
+ "
\n",
+ " \n",
+ " 895 | \n",
+ " 1.495000 | \n",
+ "
\n",
+ " \n",
+ " 896 | \n",
+ " 1.488900 | \n",
+ "
\n",
+ " \n",
+ " 897 | \n",
+ " 1.584100 | \n",
+ "
\n",
+ " \n",
+ " 898 | \n",
+ " 1.444000 | \n",
+ "
\n",
+ " \n",
+ " 899 | \n",
+ " 1.414800 | \n",
+ "
\n",
+ " \n",
+ " 900 | \n",
+ " 1.465800 | \n",
+ "
\n",
+ " \n",
+ " 901 | \n",
+ " 1.523400 | \n",
+ "
\n",
+ " \n",
+ " 902 | \n",
+ " 1.518300 | \n",
+ "
\n",
+ " \n",
+ " 903 | \n",
+ " 1.488800 | \n",
+ "
\n",
+ " \n",
+ " 904 | \n",
+ " 1.305900 | \n",
+ "
\n",
+ " \n",
+ " 905 | \n",
+ " 1.549500 | \n",
+ "
\n",
+ " \n",
+ " 906 | \n",
+ " 1.580100 | \n",
+ "
\n",
+ " \n",
+ " 907 | \n",
+ " 1.603000 | \n",
+ "
\n",
+ " \n",
+ " 908 | \n",
+ " 1.450600 | \n",
+ "
\n",
+ " \n",
+ " 909 | \n",
+ " 1.503000 | \n",
+ "
\n",
+ " \n",
+ " 910 | \n",
+ " 1.450300 | \n",
+ "
\n",
+ " \n",
+ " 911 | \n",
+ " 1.382200 | \n",
+ "
\n",
+ " \n",
+ " 912 | \n",
+ " 1.439700 | \n",
+ "
\n",
+ " \n",
+ " 913 | \n",
+ " 1.561000 | \n",
+ "
\n",
+ " \n",
+ " 914 | \n",
+ " 1.443600 | \n",
+ "
\n",
+ " \n",
+ " 915 | \n",
+ " 1.487600 | \n",
+ "
\n",
+ " \n",
+ " 916 | \n",
+ " 1.322300 | \n",
+ "
\n",
+ " \n",
+ " 917 | \n",
+ " 1.318500 | \n",
+ "
\n",
+ " \n",
+ " 918 | \n",
+ " 1.387300 | \n",
+ "
\n",
+ " \n",
+ " 919 | \n",
+ " 1.441600 | \n",
+ "
\n",
+ " \n",
+ " 920 | \n",
+ " 1.519100 | \n",
+ "
\n",
+ " \n",
+ " 921 | \n",
+ " 1.453000 | \n",
+ "
\n",
+ " \n",
+ " 922 | \n",
+ " 1.407000 | \n",
+ "
\n",
+ " \n",
+ " 923 | \n",
+ " 1.422700 | \n",
+ "
\n",
+ " \n",
+ " 924 | \n",
+ " 1.352900 | \n",
+ "
\n",
+ " \n",
+ " 925 | \n",
+ " 1.494900 | \n",
+ "
\n",
+ " \n",
+ " 926 | \n",
+ " 1.434600 | \n",
+ "
\n",
+ " \n",
+ " 927 | \n",
+ " 1.465200 | \n",
+ "
\n",
+ " \n",
+ " 928 | \n",
+ " 1.417500 | \n",
+ "
\n",
+ " \n",
+ " 929 | \n",
+ " 1.342500 | \n",
+ "
\n",
+ " \n",
+ " 930 | \n",
+ " 1.547600 | \n",
+ "
\n",
+ " \n",
+ " 931 | \n",
+ " 1.545800 | \n",
+ "
\n",
+ " \n",
+ " 932 | \n",
+ " 1.496000 | \n",
+ "
\n",
+ " \n",
+ " 933 | \n",
+ " 1.398800 | \n",
+ "
\n",
+ " \n",
+ " 934 | \n",
+ " 1.327900 | \n",
+ "
\n",
+ " \n",
+ " 935 | \n",
+ " 1.587400 | \n",
+ "
\n",
+ " \n",
+ " 936 | \n",
+ " 1.347300 | \n",
+ "
\n",
+ " \n",
+ " 937 | \n",
+ " 1.543000 | \n",
+ "
\n",
+ " \n",
+ " 938 | \n",
+ " 1.418500 | \n",
+ "
\n",
+ " \n",
+ " 939 | \n",
+ " 1.396600 | \n",
+ "
\n",
+ " \n",
+ " 940 | \n",
+ " 1.364200 | \n",
+ "
\n",
+ " \n",
+ " 941 | \n",
+ " 1.439700 | \n",
+ "
\n",
+ " \n",
+ " 942 | \n",
+ " 1.523800 | \n",
+ "
\n",
+ " \n",
+ " 943 | \n",
+ " 1.385000 | \n",
+ "
\n",
+ " \n",
+ " 944 | \n",
+ " 1.491100 | \n",
+ "
\n",
+ " \n",
+ " 945 | \n",
+ " 1.528500 | \n",
+ "
\n",
+ " \n",
+ " 946 | \n",
+ " 1.536600 | \n",
+ "
\n",
+ " \n",
+ " 947 | \n",
+ " 1.292600 | \n",
+ "
\n",
+ " \n",
+ " 948 | \n",
+ " 1.522600 | \n",
+ "
\n",
+ " \n",
+ " 949 | \n",
+ " 1.438900 | \n",
+ "
\n",
+ " \n",
+ " 950 | \n",
+ " 1.423500 | \n",
+ "
\n",
+ " \n",
+ " 951 | \n",
+ " 1.468600 | \n",
+ "
\n",
+ " \n",
+ " 952 | \n",
+ " 1.486000 | \n",
+ "
\n",
+ " \n",
+ " 953 | \n",
+ " 1.542800 | \n",
+ "
\n",
+ " \n",
+ " 954 | \n",
+ " 1.571000 | \n",
+ "
\n",
+ " \n",
+ " 955 | \n",
+ " 1.455500 | \n",
+ "
\n",
+ " \n",
+ " 956 | \n",
+ " 1.434000 | \n",
+ "
\n",
+ " \n",
+ " 957 | \n",
+ " 1.442600 | \n",
+ "
\n",
+ " \n",
+ " 958 | \n",
+ " 1.448800 | \n",
+ "
\n",
+ " \n",
+ " 959 | \n",
+ " 1.342500 | \n",
+ "
\n",
+ " \n",
+ " 960 | \n",
+ " 1.431400 | \n",
+ "
\n",
+ " \n",
+ " 961 | \n",
+ " 1.475000 | \n",
+ "
\n",
+ " \n",
+ " 962 | \n",
+ " 1.483500 | \n",
+ "
\n",
+ " \n",
+ " 963 | \n",
+ " 1.493600 | \n",
+ "
\n",
+ " \n",
+ " 964 | \n",
+ " 1.417400 | \n",
+ "
\n",
+ " \n",
+ " 965 | \n",
+ " 1.352000 | \n",
+ "
\n",
+ " \n",
+ " 966 | \n",
+ " 1.603600 | \n",
+ "
\n",
+ " \n",
+ " 967 | \n",
+ " 1.465300 | \n",
+ "
\n",
+ " \n",
+ " 968 | \n",
+ " 1.454300 | \n",
+ "
\n",
+ " \n",
+ " 969 | \n",
+ " 1.563800 | \n",
+ "
\n",
+ " \n",
+ " 970 | \n",
+ " 1.572700 | \n",
+ "
\n",
+ " \n",
+ " 971 | \n",
+ " 1.428400 | \n",
+ "
\n",
+ " \n",
+ " 972 | \n",
+ " 1.561200 | \n",
+ "
\n",
+ " \n",
+ " 973 | \n",
+ " 1.404200 | \n",
+ "
\n",
+ " \n",
+ " 974 | \n",
+ " 1.628700 | \n",
+ "
\n",
+ " \n",
+ " 975 | \n",
+ " 1.593300 | \n",
+ "
\n",
+ " \n",
+ " 976 | \n",
+ " 1.670900 | \n",
+ "
\n",
+ " \n",
+ " 977 | \n",
+ " 1.438500 | \n",
+ "
\n",
+ " \n",
+ " 978 | \n",
+ " 1.325400 | \n",
+ "
\n",
+ " \n",
+ " 979 | \n",
+ " 1.479200 | \n",
+ "
\n",
+ " \n",
+ " 980 | \n",
+ " 1.411100 | \n",
+ "
\n",
+ " \n",
+ " 981 | \n",
+ " 1.362000 | \n",
+ "
\n",
+ " \n",
+ " 982 | \n",
+ " 1.348000 | \n",
+ "
\n",
+ " \n",
+ " 983 | \n",
+ " 1.381000 | \n",
+ "
\n",
+ " \n",
+ " 984 | \n",
+ " 1.415500 | \n",
+ "
\n",
+ " \n",
+ " 985 | \n",
+ " 1.583300 | \n",
+ "
\n",
+ " \n",
+ " 986 | \n",
+ " 1.465600 | \n",
+ "
\n",
+ " \n",
+ " 987 | \n",
+ " 1.495200 | \n",
+ "
\n",
+ " \n",
+ " 988 | \n",
+ " 1.499300 | \n",
+ "
\n",
+ " \n",
+ " 989 | \n",
+ " 1.455300 | \n",
+ "
\n",
+ " \n",
+ " 990 | \n",
+ " 1.452700 | \n",
+ "
\n",
+ " \n",
+ " 991 | \n",
+ " 1.296100 | \n",
+ "
\n",
+ " \n",
+ " 992 | \n",
+ " 1.356300 | \n",
+ "
\n",
+ " \n",
+ " 993 | \n",
+ " 1.505300 | \n",
+ "
\n",
+ " \n",
+ " 994 | \n",
+ " 1.429800 | \n",
+ "
\n",
+ " \n",
+ " 995 | \n",
+ " 1.423700 | \n",
+ "
\n",
+ " \n",
+ " 996 | \n",
+ " 1.547100 | \n",
+ "
\n",
+ " \n",
+ " 997 | \n",
+ " 1.512000 | \n",
+ "
\n",
+ " \n",
+ " 998 | \n",
+ " 1.458500 | \n",
+ "
\n",
+ " \n",
+ " 999 | \n",
+ " 1.445100 | \n",
+ "
\n",
+ " \n",
+ " 1000 | \n",
+ " 1.381500 | \n",
+ "
\n",
+ " \n",
+ " 1001 | \n",
+ " 1.508700 | \n",
+ "
\n",
+ " \n",
+ " 1002 | \n",
+ " 1.457800 | \n",
+ "
\n",
+ " \n",
+ " 1003 | \n",
+ " 1.508300 | \n",
+ "
\n",
+ " \n",
+ " 1004 | \n",
+ " 1.370400 | \n",
+ "
\n",
+ " \n",
+ " 1005 | \n",
+ " 1.487900 | \n",
+ "
\n",
+ " \n",
+ " 1006 | \n",
+ " 1.517900 | \n",
+ "
\n",
+ " \n",
+ " 1007 | \n",
+ " 1.492000 | \n",
+ "
\n",
+ " \n",
+ " 1008 | \n",
+ " 1.462700 | \n",
+ "
\n",
+ " \n",
+ " 1009 | \n",
+ " 1.397000 | \n",
+ "
\n",
+ " \n",
+ " 1010 | \n",
+ " 1.522600 | \n",
+ "
\n",
+ " \n",
+ " 1011 | \n",
+ " 1.492100 | \n",
+ "
\n",
+ " \n",
+ " 1012 | \n",
+ " 1.318800 | \n",
+ "
\n",
+ " \n",
+ " 1013 | \n",
+ " 1.501300 | \n",
+ "
\n",
+ " \n",
+ " 1014 | \n",
+ " 1.491900 | \n",
+ "
\n",
+ " \n",
+ " 1015 | \n",
+ " 1.413900 | \n",
+ "
\n",
+ " \n",
+ " 1016 | \n",
+ " 1.453600 | \n",
+ "
\n",
+ " \n",
+ " 1017 | \n",
+ " 1.459800 | \n",
+ "
\n",
+ " \n",
+ " 1018 | \n",
+ " 1.492700 | \n",
+ "
\n",
+ " \n",
+ " 1019 | \n",
+ " 1.471900 | \n",
+ "
\n",
+ " \n",
+ " 1020 | \n",
+ " 1.328900 | \n",
+ "
\n",
+ " \n",
+ " 1021 | \n",
+ " 1.552300 | \n",
+ "
\n",
+ " \n",
+ " 1022 | \n",
+ " 1.300600 | \n",
+ "
\n",
+ " \n",
+ " 1023 | \n",
+ " 1.366600 | \n",
+ "
\n",
+ " \n",
+ " 1024 | \n",
+ " 1.365000 | \n",
+ "
\n",
+ " \n",
+ " 1025 | \n",
+ " 1.420200 | \n",
+ "
\n",
+ " \n",
+ " 1026 | \n",
+ " 1.392600 | \n",
+ "
\n",
+ " \n",
+ " 1027 | \n",
+ " 1.492400 | \n",
+ "
\n",
+ " \n",
+ " 1028 | \n",
+ " 1.524600 | \n",
+ "
\n",
+ " \n",
+ " 1029 | \n",
+ " 1.371600 | \n",
+ "
\n",
+ " \n",
+ " 1030 | \n",
+ " 1.431100 | \n",
+ "
\n",
+ " \n",
+ " 1031 | \n",
+ " 1.471200 | \n",
+ "
\n",
+ " \n",
+ " 1032 | \n",
+ " 1.534200 | \n",
+ "
\n",
+ " \n",
+ " 1033 | \n",
+ " 1.417100 | \n",
+ "
\n",
+ " \n",
+ " 1034 | \n",
+ " 1.394700 | \n",
+ "
\n",
+ " \n",
+ " 1035 | \n",
+ " 1.455900 | \n",
+ "
\n",
+ " \n",
+ " 1036 | \n",
+ " 1.536200 | \n",
+ "
\n",
+ " \n",
+ " 1037 | \n",
+ " 1.626100 | \n",
+ "
\n",
+ " \n",
+ " 1038 | \n",
+ " 1.588400 | \n",
+ "
\n",
+ " \n",
+ " 1039 | \n",
+ " 1.538200 | \n",
+ "
\n",
+ " \n",
+ " 1040 | \n",
+ " 1.375200 | \n",
+ "
\n",
+ " \n",
+ " 1041 | \n",
+ " 1.589300 | \n",
+ "
\n",
+ " \n",
+ " 1042 | \n",
+ " 1.557200 | \n",
+ "
\n",
+ " \n",
+ " 1043 | \n",
+ " 1.526000 | \n",
+ "
\n",
+ " \n",
+ " 1044 | \n",
+ " 1.349600 | \n",
+ "
\n",
+ " \n",
+ " 1045 | \n",
+ " 1.420000 | \n",
+ "
\n",
+ " \n",
+ " 1046 | \n",
+ " 1.444700 | \n",
+ "
\n",
+ " \n",
+ " 1047 | \n",
+ " 1.411600 | \n",
+ "
\n",
+ " \n",
+ " 1048 | \n",
+ " 1.444600 | \n",
+ "
\n",
+ " \n",
+ " 1049 | \n",
+ " 1.591000 | \n",
+ "
\n",
+ " \n",
+ " 1050 | \n",
+ " 1.384300 | \n",
+ "
\n",
+ " \n",
+ " 1051 | \n",
+ " 1.470500 | \n",
+ "
\n",
+ " \n",
+ " 1052 | \n",
+ " 1.380200 | \n",
+ "
\n",
+ " \n",
+ " 1053 | \n",
+ " 1.278600 | \n",
+ "
\n",
+ " \n",
+ " 1054 | \n",
+ " 1.276000 | \n",
+ "
\n",
+ " \n",
+ " 1055 | \n",
+ " 1.363100 | \n",
+ "
\n",
+ " \n",
+ " 1056 | \n",
+ " 1.487500 | \n",
+ "
\n",
+ " \n",
+ " 1057 | \n",
+ " 1.583300 | \n",
+ "
\n",
+ " \n",
+ " 1058 | \n",
+ " 1.470100 | \n",
+ "
\n",
+ " \n",
+ " 1059 | \n",
+ " 1.450300 | \n",
+ "
\n",
+ " \n",
+ " 1060 | \n",
+ " 1.449600 | \n",
+ "
\n",
+ " \n",
+ " 1061 | \n",
+ " 1.509500 | \n",
+ "
\n",
+ " \n",
+ " 1062 | \n",
+ " 1.436600 | \n",
+ "
\n",
+ " \n",
+ " 1063 | \n",
+ " 1.538900 | \n",
+ "
\n",
+ " \n",
+ " 1064 | \n",
+ " 1.336300 | \n",
+ "
\n",
+ " \n",
+ " 1065 | \n",
+ " 1.403300 | \n",
+ "
\n",
+ " \n",
+ " 1066 | \n",
+ " 1.440900 | \n",
+ "
\n",
+ " \n",
+ " 1067 | \n",
+ " 1.482600 | \n",
+ "
\n",
+ " \n",
+ " 1068 | \n",
+ " 1.482000 | \n",
+ "
\n",
+ " \n",
+ " 1069 | \n",
+ " 1.474700 | \n",
+ "
\n",
+ " \n",
+ " 1070 | \n",
+ " 1.539600 | \n",
+ "
\n",
+ " \n",
+ " 1071 | \n",
+ " 1.492200 | \n",
+ "
\n",
+ " \n",
+ " 1072 | \n",
+ " 1.409400 | \n",
+ "
\n",
+ " \n",
+ " 1073 | \n",
+ " 1.445600 | \n",
+ "
\n",
+ " \n",
+ " 1074 | \n",
+ " 1.339800 | \n",
+ "
\n",
+ " \n",
+ " 1075 | \n",
+ " 1.505300 | \n",
+ "
\n",
+ " \n",
+ " 1076 | \n",
+ " 1.513600 | \n",
+ "
\n",
+ " \n",
+ " 1077 | \n",
+ " 1.508100 | \n",
+ "
\n",
+ " \n",
+ " 1078 | \n",
+ " 1.592900 | \n",
+ "
\n",
+ " \n",
+ " 1079 | \n",
+ " 1.465400 | \n",
+ "
\n",
+ " \n",
+ " 1080 | \n",
+ " 1.285500 | \n",
+ "
\n",
+ " \n",
+ " 1081 | \n",
+ " 1.412400 | \n",
+ "
\n",
+ " \n",
+ " 1082 | \n",
+ " 1.588400 | \n",
+ "
\n",
+ " \n",
+ " 1083 | \n",
+ " 1.369300 | \n",
+ "
\n",
+ " \n",
+ " 1084 | \n",
+ " 1.412800 | \n",
+ "
\n",
+ " \n",
+ " 1085 | \n",
+ " 1.517000 | \n",
+ "
\n",
+ " \n",
+ " 1086 | \n",
+ " 1.518100 | \n",
+ "
\n",
+ " \n",
+ " 1087 | \n",
+ " 1.453300 | \n",
+ "
\n",
+ " \n",
+ " 1088 | \n",
+ " 1.358200 | \n",
+ "
\n",
+ " \n",
+ " 1089 | \n",
+ " 1.441300 | \n",
+ "
\n",
+ " \n",
+ " 1090 | \n",
+ " 1.573100 | \n",
+ "
\n",
+ " \n",
+ " 1091 | \n",
+ " 1.470400 | \n",
+ "
\n",
+ " \n",
+ " 1092 | \n",
+ " 1.446200 | \n",
+ "
\n",
+ " \n",
+ " 1093 | \n",
+ " 1.404700 | \n",
+ "
\n",
+ " \n",
+ " 1094 | \n",
+ " 1.325000 | \n",
+ "
\n",
+ " \n",
+ " 1095 | \n",
+ " 1.493900 | \n",
+ "
\n",
+ " \n",
+ " 1096 | \n",
+ " 1.340800 | \n",
+ "
\n",
+ " \n",
+ " 1097 | \n",
+ " 1.408600 | \n",
+ "
\n",
+ " \n",
+ " 1098 | \n",
+ " 1.440300 | \n",
+ "
\n",
+ " \n",
+ " 1099 | \n",
+ " 1.479400 | \n",
+ "
\n",
+ " \n",
+ " 1100 | \n",
+ " 1.390100 | \n",
+ "
\n",
+ " \n",
+ " 1101 | \n",
+ " 1.433100 | \n",
+ "
\n",
+ " \n",
+ " 1102 | \n",
+ " 1.412200 | \n",
+ "
\n",
+ " \n",
+ " 1103 | \n",
+ " 1.382300 | \n",
+ "
\n",
+ " \n",
+ " 1104 | \n",
+ " 1.555300 | \n",
+ "
\n",
+ " \n",
+ " 1105 | \n",
+ " 1.388700 | \n",
+ "
\n",
+ " \n",
+ " 1106 | \n",
+ " 1.450600 | \n",
+ "
\n",
+ " \n",
+ " 1107 | \n",
+ " 1.552400 | \n",
+ "
\n",
+ " \n",
+ " 1108 | \n",
+ " 1.364400 | \n",
+ "
\n",
+ " \n",
+ " 1109 | \n",
+ " 1.338100 | \n",
+ "
\n",
+ " \n",
+ " 1110 | \n",
+ " 1.367700 | \n",
+ "
\n",
+ " \n",
+ " 1111 | \n",
+ " 1.418500 | \n",
+ "
\n",
+ " \n",
+ " 1112 | \n",
+ " 1.449400 | \n",
+ "
\n",
+ " \n",
+ " 1113 | \n",
+ " 1.381700 | \n",
+ "
\n",
+ " \n",
+ " 1114 | \n",
+ " 1.358700 | \n",
+ "
\n",
+ " \n",
+ " 1115 | \n",
+ " 1.406300 | \n",
+ "
\n",
+ " \n",
+ " 1116 | \n",
+ " 1.406500 | \n",
+ "
\n",
+ " \n",
+ " 1117 | \n",
+ " 1.363200 | \n",
+ "
\n",
+ " \n",
+ " 1118 | \n",
+ " 1.523900 | \n",
+ "
\n",
+ " \n",
+ " 1119 | \n",
+ " 1.433600 | \n",
+ "
\n",
+ " \n",
+ " 1120 | \n",
+ " 1.452200 | \n",
+ "
\n",
+ " \n",
+ " 1121 | \n",
+ " 1.544300 | \n",
+ "
\n",
+ " \n",
+ " 1122 | \n",
+ " 1.465900 | \n",
+ "
\n",
+ " \n",
+ " 1123 | \n",
+ " 1.377600 | \n",
+ "
\n",
+ " \n",
+ " 1124 | \n",
+ " 1.440300 | \n",
+ "
\n",
+ " \n",
+ " 1125 | \n",
+ " 1.302200 | \n",
+ "
\n",
+ " \n",
+ " 1126 | \n",
+ " 1.468200 | \n",
+ "
\n",
+ " \n",
+ " 1127 | \n",
+ " 1.378600 | \n",
+ "
\n",
+ " \n",
+ " 1128 | \n",
+ " 1.435300 | \n",
+ "
\n",
+ " \n",
+ " 1129 | \n",
+ " 1.479000 | \n",
+ "
\n",
+ " \n",
+ " 1130 | \n",
+ " 1.382800 | \n",
+ "
\n",
+ " \n",
+ " 1131 | \n",
+ " 1.424500 | \n",
+ "
\n",
+ " \n",
+ " 1132 | \n",
+ " 1.428200 | \n",
+ "
\n",
+ " \n",
+ " 1133 | \n",
+ " 1.469500 | \n",
+ "
\n",
+ " \n",
+ " 1134 | \n",
+ " 1.468200 | \n",
+ "
\n",
+ " \n",
+ " 1135 | \n",
+ " 1.444400 | \n",
+ "
\n",
+ " \n",
+ " 1136 | \n",
+ " 1.544500 | \n",
+ "
\n",
+ " \n",
+ " 1137 | \n",
+ " 1.431600 | \n",
+ "
\n",
+ " \n",
+ " 1138 | \n",
+ " 1.442000 | \n",
+ "
\n",
+ " \n",
+ " 1139 | \n",
+ " 1.537700 | \n",
+ "
\n",
+ " \n",
+ " 1140 | \n",
+ " 1.396300 | \n",
+ "
\n",
+ " \n",
+ " 1141 | \n",
+ " 1.410400 | \n",
+ "
\n",
+ " \n",
+ " 1142 | \n",
+ " 1.438300 | \n",
+ "
\n",
+ " \n",
+ " 1143 | \n",
+ " 1.270800 | \n",
+ "
\n",
+ " \n",
+ " 1144 | \n",
+ " 1.449900 | \n",
+ "
\n",
+ " \n",
+ " 1145 | \n",
+ " 1.492000 | \n",
+ "
\n",
+ " \n",
+ " 1146 | \n",
+ " 1.487600 | \n",
+ "
\n",
+ " \n",
+ " 1147 | \n",
+ " 1.369300 | \n",
+ "
\n",
+ " \n",
+ " 1148 | \n",
+ " 1.365100 | \n",
+ "
\n",
+ " \n",
+ " 1149 | \n",
+ " 1.491000 | \n",
+ "
\n",
+ " \n",
+ " 1150 | \n",
+ " 1.413800 | \n",
+ "
\n",
+ " \n",
+ " 1151 | \n",
+ " 1.563000 | \n",
+ "
\n",
+ " \n",
+ " 1152 | \n",
+ " 1.507800 | \n",
+ "
\n",
+ " \n",
+ " 1153 | \n",
+ " 1.301600 | \n",
+ "
\n",
+ " \n",
+ " 1154 | \n",
+ " 1.511200 | \n",
+ "
\n",
+ " \n",
+ " 1155 | \n",
+ " 1.538100 | \n",
+ "
\n",
+ " \n",
+ " 1156 | \n",
+ " 1.301700 | \n",
+ "
\n",
+ " \n",
+ " 1157 | \n",
+ " 1.379500 | \n",
+ "
\n",
+ " \n",
+ " 1158 | \n",
+ " 1.603100 | \n",
+ "
\n",
+ " \n",
+ " 1159 | \n",
+ " 1.453100 | \n",
+ "
\n",
+ " \n",
+ " 1160 | \n",
+ " 1.422200 | \n",
+ "
\n",
+ " \n",
+ " 1161 | \n",
+ " 1.597700 | \n",
+ "
\n",
+ " \n",
+ " 1162 | \n",
+ " 1.541900 | \n",
+ "
\n",
+ " \n",
+ " 1163 | \n",
+ " 1.456500 | \n",
+ "
\n",
+ " \n",
+ " 1164 | \n",
+ " 1.467500 | \n",
+ "
\n",
+ " \n",
+ " 1165 | \n",
+ " 1.303300 | \n",
+ "
\n",
+ " \n",
+ " 1166 | \n",
+ " 1.495300 | \n",
+ "
\n",
+ " \n",
+ " 1167 | \n",
+ " 1.454000 | \n",
+ "
\n",
+ " \n",
+ " 1168 | \n",
+ " 1.562400 | \n",
+ "
\n",
+ " \n",
+ " 1169 | \n",
+ " 1.406800 | \n",
+ "
\n",
+ " \n",
+ " 1170 | \n",
+ " 1.247900 | \n",
+ "
\n",
+ " \n",
+ " 1171 | \n",
+ " 1.631900 | \n",
+ "
\n",
+ " \n",
+ " 1172 | \n",
+ " 1.394800 | \n",
+ "
\n",
+ " \n",
+ " 1173 | \n",
+ " 1.493100 | \n",
+ "
\n",
+ " \n",
+ " 1174 | \n",
+ " 1.379300 | \n",
+ "
\n",
+ " \n",
+ " 1175 | \n",
+ " 1.334400 | \n",
+ "
\n",
+ " \n",
+ " 1176 | \n",
+ " 1.499200 | \n",
+ "
\n",
+ " \n",
+ " 1177 | \n",
+ " 1.505100 | \n",
+ "
\n",
+ " \n",
+ " 1178 | \n",
+ " 1.415100 | \n",
+ "
\n",
+ " \n",
+ " 1179 | \n",
+ " 1.453500 | \n",
+ "
\n",
+ " \n",
+ " 1180 | \n",
+ " 1.368400 | \n",
+ "
\n",
+ " \n",
+ " 1181 | \n",
+ " 1.459900 | \n",
+ "
\n",
+ " \n",
+ " 1182 | \n",
+ " 1.544000 | \n",
+ "
\n",
+ " \n",
+ " 1183 | \n",
+ " 1.549300 | \n",
+ "
\n",
+ " \n",
+ " 1184 | \n",
+ " 1.580900 | \n",
+ "
\n",
+ " \n",
+ " 1185 | \n",
+ " 1.456400 | \n",
+ "
\n",
+ " \n",
+ " 1186 | \n",
+ " 1.465700 | \n",
+ "
\n",
+ " \n",
+ " 1187 | \n",
+ " 1.457900 | \n",
+ "
\n",
+ " \n",
+ " 1188 | \n",
+ " 1.497100 | \n",
+ "
\n",
+ " \n",
+ " 1189 | \n",
+ " 1.600700 | \n",
+ "
\n",
+ " \n",
+ " 1190 | \n",
+ " 1.438900 | \n",
+ "
\n",
+ " \n",
+ " 1191 | \n",
+ " 1.406400 | \n",
+ "
\n",
+ " \n",
+ " 1192 | \n",
+ " 1.415300 | \n",
+ "
\n",
+ " \n",
+ " 1193 | \n",
+ " 1.442900 | \n",
+ "
\n",
+ " \n",
+ " 1194 | \n",
+ " 1.488600 | \n",
+ "
\n",
+ " \n",
+ " 1195 | \n",
+ " 1.457500 | \n",
+ "
\n",
+ " \n",
+ " 1196 | \n",
+ " 1.484800 | \n",
+ "
\n",
+ " \n",
+ " 1197 | \n",
+ " 1.455100 | \n",
+ "
\n",
+ " \n",
+ " 1198 | \n",
+ " 1.467500 | \n",
+ "
\n",
+ " \n",
+ " 1199 | \n",
+ " 1.568700 | \n",
+ "
\n",
+ " \n",
+ " 1200 | \n",
+ " 1.466500 | \n",
+ "
\n",
+ " \n",
+ " 1201 | \n",
+ " 1.495300 | \n",
+ "
\n",
+ " \n",
+ " 1202 | \n",
+ " 1.496600 | \n",
+ "
\n",
+ " \n",
+ " 1203 | \n",
+ " 1.500400 | \n",
+ "
\n",
+ " \n",
+ " 1204 | \n",
+ " 1.571200 | \n",
+ "
\n",
+ " \n",
+ " 1205 | \n",
+ " 1.448100 | \n",
+ "
\n",
+ " \n",
+ " 1206 | \n",
+ " 1.405400 | \n",
+ "
\n",
+ " \n",
+ " 1207 | \n",
+ " 1.510100 | \n",
+ "
\n",
+ " \n",
+ " 1208 | \n",
+ " 1.400100 | \n",
+ "
\n",
+ " \n",
+ " 1209 | \n",
+ " 1.461100 | \n",
+ "
\n",
+ " \n",
+ " 1210 | \n",
+ " 1.368100 | \n",
+ "
\n",
+ " \n",
+ " 1211 | \n",
+ " 1.474400 | \n",
+ "
\n",
+ " \n",
+ " 1212 | \n",
+ " 1.363600 | \n",
+ "
\n",
+ " \n",
+ " 1213 | \n",
+ " 1.564700 | \n",
+ "
\n",
+ " \n",
+ " 1214 | \n",
+ " 1.553300 | \n",
+ "
\n",
+ " \n",
+ " 1215 | \n",
+ " 1.326500 | \n",
+ "
\n",
+ " \n",
+ " 1216 | \n",
+ " 1.338000 | \n",
+ "
\n",
+ " \n",
+ " 1217 | \n",
+ " 1.407600 | \n",
+ "
\n",
+ " \n",
+ " 1218 | \n",
+ " 1.584600 | \n",
+ "
\n",
+ " \n",
+ " 1219 | \n",
+ " 1.384300 | \n",
+ "
\n",
+ " \n",
+ " 1220 | \n",
+ " 1.461900 | \n",
+ "
\n",
+ " \n",
+ " 1221 | \n",
+ " 1.384800 | \n",
+ "
\n",
+ " \n",
+ " 1222 | \n",
+ " 1.406000 | \n",
+ "
\n",
+ " \n",
+ " 1223 | \n",
+ " 1.500400 | \n",
+ "
\n",
+ " \n",
+ " 1224 | \n",
+ " 1.351400 | \n",
+ "
\n",
+ " \n",
+ " 1225 | \n",
+ " 1.399500 | \n",
+ "
\n",
+ " \n",
+ " 1226 | \n",
+ " 1.415000 | \n",
+ "
\n",
+ " \n",
+ " 1227 | \n",
+ " 1.287200 | \n",
+ "
\n",
+ " \n",
+ " 1228 | \n",
+ " 1.417100 | \n",
+ "
\n",
+ " \n",
+ " 1229 | \n",
+ " 1.372600 | \n",
+ "
\n",
+ " \n",
+ " 1230 | \n",
+ " 1.329200 | \n",
+ "
\n",
+ " \n",
+ " 1231 | \n",
+ " 1.547300 | \n",
+ "
\n",
+ " \n",
+ " 1232 | \n",
+ " 1.395000 | \n",
+ "
\n",
+ " \n",
+ " 1233 | \n",
+ " 1.321300 | \n",
+ "
\n",
+ " \n",
+ " 1234 | \n",
+ " 1.296700 | \n",
+ "
\n",
+ " \n",
+ " 1235 | \n",
+ " 1.414100 | \n",
+ "
\n",
+ " \n",
+ " 1236 | \n",
+ " 1.383600 | \n",
+ "
\n",
+ " \n",
+ " 1237 | \n",
+ " 1.384600 | \n",
+ "
\n",
+ " \n",
+ " 1238 | \n",
+ " 1.401000 | \n",
+ "
\n",
+ " \n",
+ " 1239 | \n",
+ " 1.403600 | \n",
+ "
\n",
+ " \n",
+ " 1240 | \n",
+ " 1.572300 | \n",
+ "
\n",
+ " \n",
+ " 1241 | \n",
+ " 1.422600 | \n",
+ "
\n",
+ " \n",
+ " 1242 | \n",
+ " 1.386300 | \n",
+ "
\n",
+ " \n",
+ " 1243 | \n",
+ " 1.365200 | \n",
+ "
\n",
+ " \n",
+ " 1244 | \n",
+ " 1.430600 | \n",
+ "
\n",
+ " \n",
+ " 1245 | \n",
+ " 1.573700 | \n",
+ "
\n",
+ " \n",
+ " 1246 | \n",
+ " 1.518800 | \n",
+ "
\n",
+ " \n",
+ " 1247 | \n",
+ " 1.399000 | \n",
+ "
\n",
+ " \n",
+ " 1248 | \n",
+ " 1.408100 | \n",
+ "
\n",
+ " \n",
+ " 1249 | \n",
+ " 1.542400 | \n",
+ "
\n",
+ " \n",
+ " 1250 | \n",
+ " 1.504800 | \n",
+ "
\n",
+ " \n",
+ "
"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "trainer_stats = trainer.train()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "id": "f70aca8b",
+ "metadata": {
+ "cellView": "form",
+ "execution": {
+ "iopub.execute_input": "2024-03-28T03:52:49.697610Z",
+ "iopub.status.busy": "2024-03-28T03:52:49.696564Z",
+ "iopub.status.idle": "2024-03-28T03:52:49.704999Z",
+ "shell.execute_reply": "2024-03-28T03:52:49.703738Z"
+ },
+ "id": "pCqnaKmlO1U9",
+ "outputId": "e34545d2-808b-44b3-80d5-c21ca7a2da16",
+ "papermill": {
+ "duration": 0.146166,
+ "end_time": "2024-03-28T03:52:49.707144",
+ "exception": false,
+ "start_time": "2024-03-28T03:52:49.560978",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "13140.0413 seconds used for training.\n",
+ "219.0 minutes used for training.\n",
+ "Peak reserved memory = 7.268 GB.\n",
+ "Peak reserved memory for training = 2.768 GB.\n",
+ "Peak reserved memory % of max memory = 49.281 %.\n",
+ "Peak reserved memory for training % of max memory = 18.769 %.\n"
+ ]
+ }
+ ],
+ "source": [
+ "#@title Show final memory and time stats\n",
+ "used_memory = round(torch.cuda.max_memory_reserved() / 1024 / 1024 / 1024, 3)\n",
+ "used_memory_for_lora = round(used_memory - start_gpu_memory, 3)\n",
+ "used_percentage = round(used_memory /max_memory*100, 3)\n",
+ "lora_percentage = round(used_memory_for_lora/max_memory*100, 3)\n",
+ "print(f\"{trainer_stats.metrics['train_runtime']} seconds used for training.\")\n",
+ "print(f\"{round(trainer_stats.metrics['train_runtime']/60, 2)} minutes used for training.\")\n",
+ "print(f\"Peak reserved memory = {used_memory} GB.\")\n",
+ "print(f\"Peak reserved memory for training = {used_memory_for_lora} GB.\")\n",
+ "print(f\"Peak reserved memory % of max memory = {used_percentage} %.\")\n",
+ "print(f\"Peak reserved memory for training % of max memory = {lora_percentage} %.\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "8d5dff6e",
+ "metadata": {
+ "id": "ekOmTR1hSNcr",
+ "papermill": {
+ "duration": 0.139123,
+ "end_time": "2024-03-28T03:52:49.982166",
+ "exception": false,
+ "start_time": "2024-03-28T03:52:49.843043",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "\n",
+ "### Inference\n",
+ "Let's run the model! Since we're using `ChatML`, use `apply_chat_template` with `add_generation_prompt` set to `True` for inference."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "id": "bebbdda7",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T03:52:50.245764Z",
+ "iopub.status.busy": "2024-03-28T03:52:50.244849Z",
+ "iopub.status.idle": "2024-03-28T03:52:52.425841Z",
+ "shell.execute_reply": "2024-03-28T03:52:52.424679Z"
+ },
+ "id": "kR3gIAX-SM2q",
+ "outputId": "d1b13317-4781-4078-90bf-0de74d93f6e4",
+ "papermill": {
+ "duration": 2.314189,
+ "end_time": "2024-03-28T03:52:52.428079",
+ "exception": false,
+ "start_time": "2024-03-28T03:52:50.113890",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Unsloth: Will map <|im_end|> to EOS = <|im_end|>.\n",
+ "The attention mask and the pad token id were not set. As a consequence, you may observe unexpected behavior. Please pass your input's `attention_mask` to obtain reliable results.\n",
+ "Setting `pad_token_id` to `eos_token_id`:2 for open-end generation.\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "['<|im_start|>user\\nContinue the fibonnaci sequence: 1, 1, 2, 3, 5, 8,<|im_end|> \\n<|im_start|>assistant\\n13<|im_end|>']"
+ ]
+ },
+ "execution_count": 12,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "from unsloth.chat_templates import get_chat_template\n",
+ "\n",
+ "tokenizer = get_chat_template(\n",
+ " tokenizer,\n",
+ " chat_template = \"chatml\", # Supports zephyr, chatml, mistral, llama, alpaca, vicuna, vicuna_old, unsloth\n",
+ " mapping = {\"role\" : \"from\", \"content\" : \"value\", \"user\" : \"human\", \"assistant\" : \"gpt\"}, # ShareGPT style\n",
+ " map_eos_token = True, # Maps <|im_end|> to instead\n",
+ ")\n",
+ "\n",
+ "FastLanguageModel.for_inference(model) # Enable native 2x faster inference\n",
+ "\n",
+ "messages = [\n",
+ " {\"from\": \"human\", \"value\": \"Continue the fibonnaci sequence: 1, 1, 2, 3, 5, 8,\"},\n",
+ "]\n",
+ "inputs = tokenizer.apply_chat_template(\n",
+ " messages,\n",
+ " tokenize = True,\n",
+ " add_generation_prompt = True, # Must add for generation\n",
+ " return_tensors = \"pt\",\n",
+ ").to(\"cuda\")\n",
+ "\n",
+ "outputs = model.generate(input_ids = inputs, max_new_tokens = 64, use_cache = True)\n",
+ "tokenizer.batch_decode(outputs)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6c7afa6f",
+ "metadata": {
+ "id": "CrSvZObor0lY",
+ "papermill": {
+ "duration": 0.138692,
+ "end_time": "2024-03-28T03:52:52.712525",
+ "exception": false,
+ "start_time": "2024-03-28T03:52:52.573833",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ " You can also use a `TextStreamer` for continuous inference - so you can see the generation token by token, instead of waiting the whole time!"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "id": "5cf2ad38",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T03:52:52.996612Z",
+ "iopub.status.busy": "2024-03-28T03:52:52.996171Z",
+ "iopub.status.idle": "2024-03-28T03:52:53.413691Z",
+ "shell.execute_reply": "2024-03-28T03:52:53.412641Z"
+ },
+ "id": "e2pEuRb1r2Vg",
+ "outputId": "3b7b291c-8237-4473-c3db-8bc5ebbf07f9",
+ "papermill": {
+ "duration": 0.561247,
+ "end_time": "2024-03-28T03:52:53.415994",
+ "exception": false,
+ "start_time": "2024-03-28T03:52:52.854747",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "The attention mask and the pad token id were not set. As a consequence, you may observe unexpected behavior. Please pass your input's `attention_mask` to obtain reliable results.\n",
+ "Setting `pad_token_id` to `eos_token_id`:2 for open-end generation.\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "<|im_start|>user\n",
+ "Continue the fibonnaci sequence: 1, 1, 2, 3, 5, 8,<|im_end|> \n",
+ "<|im_start|>assistant\n",
+ "13<|im_end|>\n"
+ ]
+ }
+ ],
+ "source": [
+ "FastLanguageModel.for_inference(model) # Enable native 2x faster inference\n",
+ "\n",
+ "messages = [\n",
+ " {\"from\": \"human\", \"value\": \"Continue the fibonnaci sequence: 1, 1, 2, 3, 5, 8,\"},\n",
+ "]\n",
+ "inputs = tokenizer.apply_chat_template(\n",
+ " messages,\n",
+ " tokenize = True,\n",
+ " add_generation_prompt = True, # Must add for generation\n",
+ " return_tensors = \"pt\",\n",
+ ").to(\"cuda\")\n",
+ "\n",
+ "from transformers import TextStreamer\n",
+ "text_streamer = TextStreamer(tokenizer)\n",
+ "_ = model.generate(input_ids = inputs, streamer = text_streamer, max_new_tokens = 128, use_cache = True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "92f1fa55",
+ "metadata": {
+ "id": "uMuVrWbjAzhc",
+ "papermill": {
+ "duration": 0.129027,
+ "end_time": "2024-03-28T03:52:53.687793",
+ "exception": false,
+ "start_time": "2024-03-28T03:52:53.558766",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "\n",
+ "### Saving, loading finetuned models\n",
+ "To save the final model as LoRA adapters, either use Huggingface's `push_to_hub` for an online save or `save_pretrained` for a local save.\n",
+ "\n",
+ "**[NOTE]** This ONLY saves the LoRA adapters, and not the full model. To save to 16bit or GGUF, scroll down!"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "id": "ab909818",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T03:52:53.959109Z",
+ "iopub.status.busy": "2024-03-28T03:52:53.958163Z",
+ "iopub.status.idle": "2024-03-28T03:53:02.098759Z",
+ "shell.execute_reply": "2024-03-28T03:53:02.097480Z"
+ },
+ "id": "upcOlWe7A1vc",
+ "papermill": {
+ "duration": 8.274548,
+ "end_time": "2024-03-28T03:53:02.101048",
+ "exception": false,
+ "start_time": "2024-03-28T03:52:53.826500",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "50b8d4fbd7064930bbdf338796e9f09b",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "README.md: 0%| | 0.00/579 [00:00, ?B/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "cd246bcf2d034bb2af58ceb7524df6c1",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "adapter_model.safetensors: 0%| | 0.00/168M [00:00, ?B/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Saved model to https://huggingface.co/scoliono/groupchat_lora\n"
+ ]
+ }
+ ],
+ "source": [
+ "model.save_pretrained(\"lora_model\") # Local saving\n",
+ "#model.push_to_hub(\"scoliono/groupchat_lora\", token = \"\") # Online saving"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "a4861d1b",
+ "metadata": {
+ "id": "AEEcJ4qfC7Lp",
+ "papermill": {
+ "duration": 0.145328,
+ "end_time": "2024-03-28T03:53:02.385386",
+ "exception": false,
+ "start_time": "2024-03-28T03:53:02.240058",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "Now if you want to load the LoRA adapters we just saved for inference, set `False` to `True`:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "id": "a93cbbb6",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T03:53:02.657540Z",
+ "iopub.status.busy": "2024-03-28T03:53:02.657048Z",
+ "iopub.status.idle": "2024-03-28T03:53:03.262596Z",
+ "shell.execute_reply": "2024-03-28T03:53:03.261476Z"
+ },
+ "id": "MKX_XKs_BNZR",
+ "outputId": "d8dbd499-1881-41b1-9347-d3213ab473df",
+ "papermill": {
+ "duration": 0.738494,
+ "end_time": "2024-03-28T03:53:03.264761",
+ "exception": false,
+ "start_time": "2024-03-28T03:53:02.526267",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "The attention mask and the pad token id were not set. As a consequence, you may observe unexpected behavior. Please pass your input's `attention_mask` to obtain reliable results.\n",
+ "Setting `pad_token_id` to `eos_token_id`:2 for open-end generation.\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "<|im_start|>user\n",
+ "What is a famous tall tower in Paris?<|im_end|> \n",
+ "<|im_start|>assistant\n",
+ "Eiffel tower<|im_end|>\n"
+ ]
+ }
+ ],
+ "source": [
+ "if False:\n",
+ " from unsloth import FastLanguageModel\n",
+ " model, tokenizer = FastLanguageModel.from_pretrained(\n",
+ " model_name = \"lora_model\", # YOUR MODEL YOU USED FOR TRAINING\n",
+ " max_seq_length = max_seq_length,\n",
+ " dtype = dtype,\n",
+ " load_in_4bit = load_in_4bit,\n",
+ " )\n",
+ " FastLanguageModel.for_inference(model) # Enable native 2x faster inference\n",
+ "\n",
+ "messages = [\n",
+ " {\"from\": \"human\", \"value\": \"What is a famous tall tower in Paris?\"},\n",
+ "]\n",
+ "inputs = tokenizer.apply_chat_template(\n",
+ " messages,\n",
+ " tokenize = True,\n",
+ " add_generation_prompt = True, # Must add for generation\n",
+ " return_tensors = \"pt\",\n",
+ ").to(\"cuda\")\n",
+ "\n",
+ "from transformers import TextStreamer\n",
+ "text_streamer = TextStreamer(tokenizer)\n",
+ "_ = model.generate(input_ids = inputs, streamer = text_streamer, max_new_tokens = 128, use_cache = True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9ada1c2a",
+ "metadata": {
+ "id": "QQMjaNrjsU5_",
+ "papermill": {
+ "duration": 0.126538,
+ "end_time": "2024-03-28T03:53:03.522957",
+ "exception": false,
+ "start_time": "2024-03-28T03:53:03.396419",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "You can also use Hugging Face's `AutoModelForPeftCausalLM`. Only use this if you do not have `unsloth` installed. It can be hopelessly slow, since `4bit` model downloading is not supported, and Unsloth's **inference is 2x faster**."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "id": "3c9e54cd",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T03:53:03.788897Z",
+ "iopub.status.busy": "2024-03-28T03:53:03.788477Z",
+ "iopub.status.idle": "2024-03-28T03:53:03.793816Z",
+ "shell.execute_reply": "2024-03-28T03:53:03.792849Z"
+ },
+ "id": "yFfaXG0WsQuE",
+ "papermill": {
+ "duration": 0.139822,
+ "end_time": "2024-03-28T03:53:03.795815",
+ "exception": false,
+ "start_time": "2024-03-28T03:53:03.655993",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "if False:\n",
+ " # I highly do NOT suggest - use Unsloth if possible\n",
+ " from peft import AutoModelForPeftCausalLM\n",
+ " from transformers import AutoTokenizer\n",
+ " model = AutoModelForPeftCausalLM.from_pretrained(\n",
+ " \"lora_model\", # YOUR MODEL YOU USED FOR TRAINING\n",
+ " load_in_4bit = load_in_4bit,\n",
+ " )\n",
+ " tokenizer = AutoTokenizer.from_pretrained(\"lora_model\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "ae8b6865",
+ "metadata": {
+ "id": "f422JgM9sdVT",
+ "papermill": {
+ "duration": 0.133177,
+ "end_time": "2024-03-28T03:53:04.058229",
+ "exception": false,
+ "start_time": "2024-03-28T03:53:03.925052",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "### Saving to float16 for VLLM\n",
+ "\n",
+ "We also support saving to `float16` directly. Select `merged_16bit` for float16 or `merged_4bit` for int4. We also allow `lora` adapters as a fallback. Use `push_to_hub_merged` to upload to your Hugging Face account! You can go to https://huggingface.co/settings/tokens for your personal tokens."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "id": "73bff174",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T03:53:04.324460Z",
+ "iopub.status.busy": "2024-03-28T03:53:04.324036Z",
+ "iopub.status.idle": "2024-03-28T03:53:04.331159Z",
+ "shell.execute_reply": "2024-03-28T03:53:04.330165Z"
+ },
+ "id": "iHjt_SMYsd3P",
+ "papermill": {
+ "duration": 0.140814,
+ "end_time": "2024-03-28T03:53:04.333322",
+ "exception": false,
+ "start_time": "2024-03-28T03:53:04.192508",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "# Merge to 16bit\n",
+ "if False: model.save_pretrained_merged(\"model\", tokenizer, save_method = \"merged_16bit\",)\n",
+ "if False: model.push_to_hub_merged(\"hf/model\", tokenizer, save_method = \"merged_16bit\", token = \"\")\n",
+ "\n",
+ "# Merge to 4bit\n",
+ "if False: model.save_pretrained_merged(\"model\", tokenizer, save_method = \"merged_4bit\",)\n",
+ "if False: model.push_to_hub_merged(\"hf/model\", tokenizer, save_method = \"merged_4bit\", token = \"\")\n",
+ "\n",
+ "# Just LoRA adapters\n",
+ "if False: model.save_pretrained_merged(\"model\", tokenizer, save_method = \"lora\",)\n",
+ "if False: model.push_to_hub_merged(\"hf/model\", tokenizer, save_method = \"lora\", token = \"\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "96270533",
+ "metadata": {
+ "id": "TCv4vXHd61i7",
+ "papermill": {
+ "duration": 0.141816,
+ "end_time": "2024-03-28T03:53:04.663103",
+ "exception": false,
+ "start_time": "2024-03-28T03:53:04.521287",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "### GGUF / llama.cpp Conversion\n",
+ "To save to `GGUF` / `llama.cpp`, we support it natively now! We clone `llama.cpp` and we default save it to `q8_0`. We allow all methods like `q4_k_m`. Use `save_pretrained_gguf` for local saving and `push_to_hub_gguf` for uploading to HF.\n",
+ "\n",
+ "Some supported quant methods (full list on our [Wiki page](https://github.com/unslothai/unsloth/wiki#gguf-quantization-options)):\n",
+ "* `q8_0` - Fast conversion. High resource use, but generally acceptable.\n",
+ "* `q4_k_m` - Recommended. Uses Q6_K for half of the attention.wv and feed_forward.w2 tensors, else Q4_K.\n",
+ "* `q5_k_m` - Recommended. Uses Q6_K for half of the attention.wv and feed_forward.w2 tensors, else Q5_K."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "id": "cd67a84a",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2024-03-28T03:53:04.940718Z",
+ "iopub.status.busy": "2024-03-28T03:53:04.939798Z",
+ "iopub.status.idle": "2024-03-28T03:53:04.948324Z",
+ "shell.execute_reply": "2024-03-28T03:53:04.947106Z"
+ },
+ "id": "FqfebeAdT073",
+ "papermill": {
+ "duration": 0.147412,
+ "end_time": "2024-03-28T03:53:04.951208",
+ "exception": false,
+ "start_time": "2024-03-28T03:53:04.803796",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "# Save to 8bit Q8_0\n",
+ "if False: model.save_pretrained_gguf(\"model\", tokenizer,)\n",
+ "if False: model.push_to_hub_gguf(\"hf/model\", tokenizer, token = \"\")\n",
+ "\n",
+ "# Save to 16bit GGUF\n",
+ "if False: model.save_pretrained_gguf(\"model\", tokenizer, quantization_method = \"f16\")\n",
+ "if False: model.push_to_hub_gguf(\"hf/model\", tokenizer, quantization_method = \"f16\", token = \"\")\n",
+ "\n",
+ "# Save to q4_k_m GGUF\n",
+ "if False: model.save_pretrained_gguf(\"model\", tokenizer, quantization_method = \"q4_k_m\")\n",
+ "if False: model.push_to_hub_gguf(\"hf/model\", tokenizer, quantization_method = \"q4_k_m\", token = \"\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "974bde3a",
+ "metadata": {
+ "id": "bDp0zNpwe6U_",
+ "papermill": {
+ "duration": 0.159571,
+ "end_time": "2024-03-28T03:53:05.263051",
+ "exception": false,
+ "start_time": "2024-03-28T03:53:05.103480",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "Now, use the `model-unsloth.gguf` file or `model-unsloth-Q4_K_M.gguf` file in `llama.cpp` or a UI based system like `GPT4All`. You can install GPT4All by going [here](https://gpt4all.io/index.html)."
+ ]
+ },
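+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a quick sanity check, you can also load the exported GGUF from Python. This is a minimal sketch, assuming the third-party `llama-cpp-python` package is installed and that the `model-unsloth-Q4_K_M.gguf` file from the save step above is in the current directory:\n",
+    "\n",
+    "```python\n",
+    "# pip install llama-cpp-python\n",
+    "from llama_cpp import Llama\n",
+    "\n",
+    "# Load the quantized GGUF saved above (adjust the path to wherever you saved it)\n",
+    "llm = Llama(model_path = \"model-unsloth-Q4_K_M.gguf\")\n",
+    "\n",
+    "# Prompt in the same ChatML format we finetuned with\n",
+    "prompt = \"<|im_start|>user\\nContinue the fibonacci sequence: 1, 1, 2, 3, 5, 8,<|im_end|>\\n<|im_start|>assistant\\n\"\n",
+    "out = llm(prompt, max_tokens = 64, stop = [\"<|im_end|>\"])\n",
+    "print(out[\"choices\"][0][\"text\"])\n",
+    "```"
+   ]
+  },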
+ {
+ "cell_type": "markdown",
+ "id": "c25b0c14",
+ "metadata": {
+ "id": "Zt9CHJqO6p30",
+ "papermill": {
+ "duration": 0.126368,
+ "end_time": "2024-03-28T03:53:05.527719",
+ "exception": false,
+ "start_time": "2024-03-28T03:53:05.401351",
+ "status": "completed"
+ },
+ "tags": []
+ },
+ "source": [
+ "And we're done! If you have any questions on Unsloth, we have a [Discord](https://discord.gg/u54VK8m8tk) channel! If you find any bugs or want to keep updated with the latest LLM stuff, or need help, join projects etc, feel free to join our Discord!\n",
+ "\n",
+ "Some other links:\n",
+ "1. Zephyr DPO 2x faster [free Colab](https://colab.research.google.com/drive/15vttTpzzVXv_tJwEk-hIcQ0S9FcEWvwP?usp=sharing)\n",
+ "2. Llama 7b 2x faster [free Colab](https://colab.research.google.com/drive/1lBzz5KeZJKXjvivbYvmGarix9Ao6Wxe5?usp=sharing)\n",
+ "3. TinyLlama 4x faster full Alpaca 52K in 1 hour [free Colab](https://colab.research.google.com/drive/1AZghoNBQaMDgWJpi4RbffGM1h6raLUj9?usp=sharing)\n",
+ "4. CodeLlama 34b 2x faster [A100 on Colab](https://colab.research.google.com/drive/1y7A0AxE3y8gdj4AVkl2aZX47Xu3P1wJT?usp=sharing)\n",
+ "5. Mistral 7b [free Kaggle version](https://www.kaggle.com/code/danielhanchen/kaggle-mistral-7b-unsloth-notebook)\n",
+ "6. We also did a [blog](https://huggingface.co/blog/unsloth-trl) with 🤗 HuggingFace, and we're in the TRL [docs](https://huggingface.co/docs/trl/main/en/sft_trainer#accelerate-fine-tuning-2x-using-unsloth)!\n",
+ "7. Text completions like novel writing [notebook](https://colab.research.google.com/drive/1ef-tab5bhkvWmBOObepl1WgJvfvSzn5Q?usp=sharing)\n",
+ "9. Gemma 6 trillion tokens is 2.5x faster! [free Colab](https://colab.research.google.com/drive/10NbwlsRChbma1v55m8LAPYG15uQv6HLo?usp=sharing)\n",
+ "\n",
+ "\n",
+ "
\n",
+ "
\n",
+ "
Support our work if you can! Thanks!\n",
+ "
"
+ ]
+ }
+ ],
+ "metadata": {
+ "accelerator": "GPU",
+ "colab": {
+ "gpuType": "T4",
+ "provenance": []
+ },
+ "kaggle": {
+ "accelerator": "nvidiaTeslaT4",
+ "dataSources": [
+ {
+ "databundleVersionId": 8069943,
+ "datasetId": 4675483,
+ "sourceId": 7960221,
+ "sourceType": "datasetVersion"
+ }
+ ],
+ "dockerImageVersionId": 30674,
+ "isGpuEnabled": true,
+ "isInternetEnabled": true,
+ "language": "python",
+ "sourceType": "notebook"
+ },
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.13"
+ },
+ "papermill": {
+ "default_parameters": {},
+ "duration": 13460.036096,
+ "end_time": "2024-03-28T03:53:09.311636",
+ "environment_variables": {},
+ "exception": null,
+ "input_path": "__notebook__.ipynb",
+ "output_path": "__notebook__.ipynb",
+ "parameters": {},
+ "start_time": "2024-03-28T00:08:49.275540",
+ "version": "2.5.0"
+ },
+ "widgets": {
+ "application/vnd.jupyter.widget-state+json": {
+ "state": {
+ "0115b7fb1d69489db936630116a2fd95": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "0291f4d1f4734954946a71afef1e1519": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HBoxModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HBoxModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HBoxView",
+ "box_style": "",
+ "children": [
+ "IPY_MODEL_158f6c4ac7424b11b5c52e11c44b7048",
+ "IPY_MODEL_a3f2617affcc4004a6c90a0aff265efa",
+ "IPY_MODEL_32872e40c0614d86b196c45a6daf6f0f"
+ ],
+ "layout": "IPY_MODEL_d181e166c5624b939f1e52ea36f0b501"
+ }
+ },
+ "07b887a58fa24b6bbac72e1912ce6f2d": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_c923307d2ec34dcbb6ffc9c72d6bd907",
+ "placeholder": "",
+ "style": "IPY_MODEL_3fea601e40ad410e9fcbd3f8746e0114",
+ "value": " 493k/493k [00:00<00:00, 32.6MB/s]"
+ }
+ },
+ "0ae97a9e92204ba3adeda272690ca00a": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "FloatProgressModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "ProgressView",
+ "bar_style": "success",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_dfc5e79bc6df4103862801033bf223c4",
+ "max": 10000,
+ "min": 0,
+ "orientation": "horizontal",
+ "style": "IPY_MODEL_80730c77f59f4d77a7309968c3565eff",
+ "value": 10000
+ }
+ },
+ "0aea08bb4b424c028157fc47920880ba": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_e8060f5acdc44fb4b08b9031cda127be",
+ "placeholder": "",
+ "style": "IPY_MODEL_83892ec619ff46d69a90e59fb1a31699",
+ "value": " 438/438 [00:00<00:00, 36.5kB/s]"
+ }
+ },
+ "0b7256cbb0df4612892401684edf4c6b": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "0c08cdcc7686498f9dfca58fd546aa81": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "0d0bc50934304ebbbd5767774c1bba6c": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "ProgressStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "bar_color": null,
+ "description_width": ""
+ }
+ },
+ "107e30bda0f7447ba2cae4b4b8e439d8": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "1173bc14f32e46e99dc6860367629ed0": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "1334bfd9e02f46baaced48e306753b61": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "FloatProgressModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "ProgressView",
+ "bar_style": "success",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_5e3a5f9429134ac5bae37c33e20141f4",
+ "max": 493443,
+ "min": 0,
+ "orientation": "horizontal",
+ "style": "IPY_MODEL_645835ede2a64a698f3e7ae720ebb524",
+ "value": 493443
+ }
+ },
+ "1430fff1572a43fbadecedb54630c004": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "158f6c4ac7424b11b5c52e11c44b7048": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_cfdedb4f201c4a1d82c6dd2c19d66eab",
+ "placeholder": "",
+ "style": "IPY_MODEL_1d2abbae10d5489ca31f2923e3608ce5",
+ "value": "model.safetensors: 100%"
+ }
+ },
+ "1990a051263f4ecba55c2d190b89d18a": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "1a6305f14afe472c911c243151a7d754": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_f5610eccab294c99ab9004d5cd496185",
+ "placeholder": "",
+ "style": "IPY_MODEL_5971293c1eb243e9835079846e57e888",
+ "value": "tokenizer.model: 100%"
+ }
+ },
+ "1d2abbae10d5489ca31f2923e3608ce5": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "22ac3169e6a74ef59352c9c1a633940a": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_3524bea482354033a8867f5d8808e2bf",
+ "placeholder": "",
+ "style": "IPY_MODEL_ca92331767b848c5af22af298c030b7d",
+ "value": " 579/579 [00:00<00:00, 34.5kB/s]"
+ }
+ },
+ "235497034e5c4a3293591576999865e8": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_d368cc755f8142f7852135fcac43007b",
+ "placeholder": "",
+ "style": "IPY_MODEL_53494ae85aea477c9daac23b952ebd01",
+ "value": "Map (num_proc=2): 100%"
+ }
+ },
+ "2543f037a9b048b6a49d074fac474e7a": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "32872e40c0614d86b196c45a6daf6f0f": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_e5601c035def4d1ab1e146370d3c0dc9",
+ "placeholder": "",
+ "style": "IPY_MODEL_e8d7de2bb23a4d67b5f9e8b72e8b6e86",
+ "value": " 4.13G/4.13G [00:23<00:00, 207MB/s]"
+ }
+ },
+ "3524bea482354033a8867f5d8808e2bf": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "374a7e4da1474ba1b4827d867f1a49ad": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "3fea601e40ad410e9fcbd3f8746e0114": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "430e25b60ae544ca932139c6232e8580": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_8eb0d1b9b2e54c5f8515768af89677aa",
+ "placeholder": "",
+ "style": "IPY_MODEL_6867905d03624962a4b4ebcf818ea218",
+ "value": "generation_config.json: 100%"
+ }
+ },
+ "4646579350cb4e44b075a18f9eee2d33": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "46660e1d775d4280b3d942b03d1bb54e": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "4728bc10d9e64cee99025ee07689a449": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "ProgressStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "bar_color": null,
+ "description_width": ""
+ }
+ },
+ "48d009073a83434f946f292e830a371d": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "ProgressStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "bar_color": null,
+ "description_width": ""
+ }
+ },
+ "50b8d4fbd7064930bbdf338796e9f09b": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HBoxModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HBoxModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HBoxView",
+ "box_style": "",
+ "children": [
+ "IPY_MODEL_5177a503e53a4fb38db35b3f13400ef6",
+ "IPY_MODEL_c858048f53f94de2a1b0a1982834c358",
+ "IPY_MODEL_22ac3169e6a74ef59352c9c1a633940a"
+ ],
+ "layout": "IPY_MODEL_ab97cbd99a3c4013a9ad37fabbb1cc9f"
+ }
+ },
+ "5177a503e53a4fb38db35b3f13400ef6": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_1990a051263f4ecba55c2d190b89d18a",
+ "placeholder": "",
+ "style": "IPY_MODEL_a7298a133c0c4735afb22c3622b47124",
+ "value": "README.md: 100%"
+ }
+ },
+ "528c2302fc7a480895a850916b8efaba": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "533ef0f3916b423cbc151d4b1929030e": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "53494ae85aea477c9daac23b952ebd01": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "54a9cc9d3fea4b2f84a90d9540654950": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "58648d0ab785418089b24914c46df7a4": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HBoxModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HBoxModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HBoxView",
+ "box_style": "",
+ "children": [
+ "IPY_MODEL_e9db2da5e7604322a8ef642e23eb9e3d",
+ "IPY_MODEL_f2786dfcd82a4e14b70e5bae4a78fe51",
+ "IPY_MODEL_5e3e1df5ad73479cb78e29820aba2f93"
+ ],
+ "layout": "IPY_MODEL_528c2302fc7a480895a850916b8efaba"
+ }
+ },
+ "59233cb5bb5849d0800cde9d3c129184": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HBoxModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HBoxModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HBoxView",
+ "box_style": "",
+ "children": [
+ "IPY_MODEL_eeef8455b81547249bff4b71af02f6b1",
+ "IPY_MODEL_5f2c77ad6e1f4c1091016b86703586e9",
+ "IPY_MODEL_0aea08bb4b424c028157fc47920880ba"
+ ],
+ "layout": "IPY_MODEL_d8e77b14b649416bbe4f177d7c3013ed"
+ }
+ },
+ "5971293c1eb243e9835079846e57e888": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "5e3a5f9429134ac5bae37c33e20141f4": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "5e3e1df5ad73479cb78e29820aba2f93": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_bf88208e773c423b904ce0f8820f85bc",
+ "placeholder": "",
+ "style": "IPY_MODEL_54a9cc9d3fea4b2f84a90d9540654950",
+ "value": " 1.05k/1.05k [00:00<00:00, 81.7kB/s]"
+ }
+ },
+ "5f2c77ad6e1f4c1091016b86703586e9": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "FloatProgressModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "ProgressView",
+ "bar_style": "success",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_ff7a5292b7d649e3a089c5efa790122e",
+ "max": 438,
+ "min": 0,
+ "orientation": "horizontal",
+ "style": "IPY_MODEL_e1cfa28343e844c1ae94eb516a91c2b4",
+ "value": 438
+ }
+ },
+ "637ccbae2ae14930be31fd9484a86b52": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "FloatProgressModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "ProgressView",
+ "bar_style": "success",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_b0496f143171468fa45614691e9a47f8",
+ "max": 116,
+ "min": 0,
+ "orientation": "horizontal",
+ "style": "IPY_MODEL_c03125a200e1408fbb7fa0a20abebda0",
+ "value": 116
+ }
+ },
+ "645835ede2a64a698f3e7ae720ebb524": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "ProgressStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "bar_color": null,
+ "description_width": ""
+ }
+ },
+ "6867905d03624962a4b4ebcf818ea218": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "6d4015e9a13b4ce99d5d1b2b1aa86ded": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "71c42de5a7fd4df1a85295b07c33c1e6": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "720036ee71e346ce909dfd8c5e3a7e29": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "FloatProgressModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "ProgressView",
+ "bar_style": "success",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_1173bc14f32e46e99dc6860367629ed0",
+ "max": 167832240,
+ "min": 0,
+ "orientation": "horizontal",
+ "style": "IPY_MODEL_a27d39bbfd7642269fa34a53496bf3c9",
+ "value": 167832240
+ }
+ },
+ "776a339dd3044e83b7a34444ddc008e2": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "77abdec067904f0081dfd825b2521ec4": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_1430fff1572a43fbadecedb54630c004",
+ "placeholder": "",
+ "style": "IPY_MODEL_0b7256cbb0df4612892401684edf4c6b",
+ "value": "tokenizer.json: 100%"
+ }
+ },
+ "792a56bc40ea4529886d964473d96954": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "7963938987694831aedd24fd6061a8df": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_dd636d310bfe41d7a22230b10918ffe3",
+ "placeholder": "",
+ "style": "IPY_MODEL_f364f4bbb7f847f780ffc157ab872703",
+ "value": " 116/116 [00:00<00:00, 8.89kB/s]"
+ }
+ },
+ "80730c77f59f4d77a7309968c3565eff": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "ProgressStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "bar_color": null,
+ "description_width": ""
+ }
+ },
+ "83892ec619ff46d69a90e59fb1a31699": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "83fd232b47314e42bbe7526e0ab7aecf": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "84af480c5f6c4ad490074ef103af5628": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HBoxModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HBoxModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HBoxView",
+ "box_style": "",
+ "children": [
+ "IPY_MODEL_77abdec067904f0081dfd825b2521ec4",
+ "IPY_MODEL_d3995c4222f9481b81dd057d8177afd8",
+ "IPY_MODEL_b1040d0671624ccd965ea5f99adbd0d3"
+ ],
+ "layout": "IPY_MODEL_776a339dd3044e83b7a34444ddc008e2"
+ }
+ },
+ "87f33e5bd27c4885892c9826a384e8a5": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "8eb0d1b9b2e54c5f8515768af89677aa": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "92da558bad0e4330a2b4b3041ed24aad": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HBoxModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HBoxModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HBoxView",
+ "box_style": "",
+ "children": [
+ "IPY_MODEL_1a6305f14afe472c911c243151a7d754",
+ "IPY_MODEL_1334bfd9e02f46baaced48e306753b61",
+ "IPY_MODEL_07b887a58fa24b6bbac72e1912ce6f2d"
+ ],
+ "layout": "IPY_MODEL_46660e1d775d4280b3d942b03d1bb54e"
+ }
+ },
+ "944f3bb3fc0f4ef594424d1ed90f391b": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HBoxModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HBoxModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HBoxView",
+ "box_style": "",
+ "children": [
+ "IPY_MODEL_430e25b60ae544ca932139c6232e8580",
+ "IPY_MODEL_637ccbae2ae14930be31fd9484a86b52",
+ "IPY_MODEL_7963938987694831aedd24fd6061a8df"
+ ],
+ "layout": "IPY_MODEL_b94ada6bb88f48e0a53f8a048b05d21f"
+ }
+ },
+ "9a80eab1ff094eab808ae272670a4f54": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "9aeb5170934f4e7da9748e8859e40e30": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HBoxModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HBoxModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HBoxView",
+ "box_style": "",
+ "children": [
+ "IPY_MODEL_b92b53c1c7f7451dafcc0e12c701040d",
+ "IPY_MODEL_e6b7217de79c44429aa4ca0cace148d5",
+ "IPY_MODEL_e7aead7914b54afa8c293ef84bb3e2b0"
+ ],
+ "layout": "IPY_MODEL_bc9b2ac4aead46daa51e5d14cf3600c1"
+ }
+ },
+ "9fd39fcb80594936a1b160dfe290877b": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "a03228389e2841db96cafad58e0a45f1": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "ProgressStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "bar_color": null,
+ "description_width": ""
+ }
+ },
+ "a12807eb4e4f4ad7bcd78339c990cdd4": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "a24ef002e2284485b815a060326d1754": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "ProgressStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "bar_color": null,
+ "description_width": ""
+ }
+ },
+ "a27d39bbfd7642269fa34a53496bf3c9": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "ProgressStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "bar_color": null,
+ "description_width": ""
+ }
+ },
+ "a3f2617affcc4004a6c90a0aff265efa": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "FloatProgressModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "ProgressView",
+ "bar_style": "success",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_71c42de5a7fd4df1a85295b07c33c1e6",
+ "max": 4125687906,
+ "min": 0,
+ "orientation": "horizontal",
+ "style": "IPY_MODEL_a24ef002e2284485b815a060326d1754",
+ "value": 4125687906
+ }
+ },
+ "a7298a133c0c4735afb22c3622b47124": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "a96c934ca8474a3a8376256b78e9eedf": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "ab97cbd99a3c4013a9ad37fabbb1cc9f": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "aeb4c56bfa5f44dfa78b95944c76e1cd": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_e5a882949c3346808f857cf04d74d7de",
+ "placeholder": "",
+ "style": "IPY_MODEL_0115b7fb1d69489db936630116a2fd95",
+ "value": " 10000/10000 [00:04<00:00, 2544.61 examples/s]"
+ }
+ },
+ "b0496f143171468fa45614691e9a47f8": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "b1040d0671624ccd965ea5f99adbd0d3": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_533ef0f3916b423cbc151d4b1929030e",
+ "placeholder": "",
+ "style": "IPY_MODEL_c6264af6bcf3482caefca09fce59935b",
+ "value": " 1.80M/1.80M [00:00<00:00, 25.2MB/s]"
+ }
+ },
+ "b178f8303b8548d49eb4d1bd4e419f6e": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "b83b5e49b46742f1b266dc5f96fa7c73": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "b92b53c1c7f7451dafcc0e12c701040d": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_b178f8303b8548d49eb4d1bd4e419f6e",
+ "placeholder": "",
+ "style": "IPY_MODEL_6d4015e9a13b4ce99d5d1b2b1aa86ded",
+ "value": "tokenizer_config.json: 100%"
+ }
+ },
+ "b94ada6bb88f48e0a53f8a048b05d21f": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "bc9b2ac4aead46daa51e5d14cf3600c1": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "bf88208e773c423b904ce0f8820f85bc": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "c03125a200e1408fbb7fa0a20abebda0": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "ProgressStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "bar_color": null,
+ "description_width": ""
+ }
+ },
+ "c112e03f254a4cfe92ae9e0d584aa462": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_e0406a70ccb64deb833e45b38f3a680e",
+ "placeholder": "",
+ "style": "IPY_MODEL_e18b8e5bfe714bc680f59a70ac38c014",
+ "value": "adapter_model.safetensors: 100%"
+ }
+ },
+ "c6135b0b86f6471181ee3cf4945652c8": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_0c08cdcc7686498f9dfca58fd546aa81",
+ "placeholder": "",
+ "style": "IPY_MODEL_a96c934ca8474a3a8376256b78e9eedf",
+ "value": " 168M/168M [00:04<00:00, 31.3MB/s]"
+ }
+ },
+ "c6264af6bcf3482caefca09fce59935b": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "c858048f53f94de2a1b0a1982834c358": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "FloatProgressModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "ProgressView",
+ "bar_style": "success",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_a12807eb4e4f4ad7bcd78339c990cdd4",
+ "max": 579,
+ "min": 0,
+ "orientation": "horizontal",
+ "style": "IPY_MODEL_0d0bc50934304ebbbd5767774c1bba6c",
+ "value": 579
+ }
+ },
+ "c8edb73b6d7f4e97a737a3bfd67d8a75": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "c923307d2ec34dcbb6ffc9c72d6bd907": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "ca92331767b848c5af22af298c030b7d": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "cd246bcf2d034bb2af58ceb7524df6c1": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HBoxModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HBoxModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HBoxView",
+ "box_style": "",
+ "children": [
+ "IPY_MODEL_c112e03f254a4cfe92ae9e0d584aa462",
+ "IPY_MODEL_720036ee71e346ce909dfd8c5e3a7e29",
+ "IPY_MODEL_c6135b0b86f6471181ee3cf4945652c8"
+ ],
+ "layout": "IPY_MODEL_4646579350cb4e44b075a18f9eee2d33"
+ }
+ },
+ "cfdedb4f201c4a1d82c6dd2c19d66eab": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "d181e166c5624b939f1e52ea36f0b501": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "d368cc755f8142f7852135fcac43007b": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "d3995c4222f9481b81dd057d8177afd8": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "FloatProgressModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "ProgressView",
+ "bar_style": "success",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_c8edb73b6d7f4e97a737a3bfd67d8a75",
+ "max": 1795303,
+ "min": 0,
+ "orientation": "horizontal",
+ "style": "IPY_MODEL_a03228389e2841db96cafad58e0a45f1",
+ "value": 1795303
+ }
+ },
+ "d8e77b14b649416bbe4f177d7c3013ed": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "dd636d310bfe41d7a22230b10918ffe3": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "dfc5e79bc6df4103862801033bf223c4": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "e0406a70ccb64deb833e45b38f3a680e": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "e18b8e5bfe714bc680f59a70ac38c014": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "e1cfa28343e844c1ae94eb516a91c2b4": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "ProgressStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "bar_color": null,
+ "description_width": ""
+ }
+ },
+ "e363d483a5134f5d873c11f936d2d9f5": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HBoxModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HBoxModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HBoxView",
+ "box_style": "",
+ "children": [
+ "IPY_MODEL_235497034e5c4a3293591576999865e8",
+ "IPY_MODEL_0ae97a9e92204ba3adeda272690ca00a",
+ "IPY_MODEL_aeb4c56bfa5f44dfa78b95944c76e1cd"
+ ],
+ "layout": "IPY_MODEL_9fd39fcb80594936a1b160dfe290877b"
+ }
+ },
+ "e5601c035def4d1ab1e146370d3c0dc9": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "e5a882949c3346808f857cf04d74d7de": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "e6b7217de79c44429aa4ca0cace148d5": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "FloatProgressModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "ProgressView",
+ "bar_style": "success",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_374a7e4da1474ba1b4827d867f1a49ad",
+ "max": 971,
+ "min": 0,
+ "orientation": "horizontal",
+ "style": "IPY_MODEL_48d009073a83434f946f292e830a371d",
+ "value": 971
+ }
+ },
+ "e7aead7914b54afa8c293ef84bb3e2b0": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_107e30bda0f7447ba2cae4b4b8e439d8",
+ "placeholder": "",
+ "style": "IPY_MODEL_792a56bc40ea4529886d964473d96954",
+ "value": " 971/971 [00:00<00:00, 74.7kB/s]"
+ }
+ },
+ "e8060f5acdc44fb4b08b9031cda127be": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "e8d7de2bb23a4d67b5f9e8b72e8b6e86": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "e9db2da5e7604322a8ef642e23eb9e3d": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_9a80eab1ff094eab808ae272670a4f54",
+ "placeholder": "",
+ "style": "IPY_MODEL_2543f037a9b048b6a49d074fac474e7a",
+ "value": "config.json: 100%"
+ }
+ },
+ "eeef8455b81547249bff4b71af02f6b1": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_87f33e5bd27c4885892c9826a384e8a5",
+ "placeholder": "",
+ "style": "IPY_MODEL_b83b5e49b46742f1b266dc5f96fa7c73",
+ "value": "special_tokens_map.json: 100%"
+ }
+ },
+ "f2786dfcd82a4e14b70e5bae4a78fe51": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "FloatProgressModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "1.5.0",
+ "_view_name": "ProgressView",
+ "bar_style": "success",
+ "description": "",
+ "description_tooltip": null,
+ "layout": "IPY_MODEL_83fd232b47314e42bbe7526e0ab7aecf",
+ "max": 1055,
+ "min": 0,
+ "orientation": "horizontal",
+ "style": "IPY_MODEL_4728bc10d9e64cee99025ee07689a449",
+ "value": 1055
+ }
+ },
+ "f364f4bbb7f847f780ffc157ab872703": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "1.5.0",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_model_name": "DescriptionStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "StyleView",
+ "description_width": ""
+ }
+ },
+ "f5610eccab294c99ab9004d5cd496185": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "ff7a5292b7d649e3a089c5efa790122e": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "1.2.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.2.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "1.2.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "overflow_x": null,
+ "overflow_y": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ }
+ },
+ "version_major": 2,
+ "version_minor": 0
+ }
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}