{ "cells": [ { "cell_type": "markdown", "id": "f9f8a4ee", "metadata": { "id": "IqM-T1RTzY6C", "papermill": { "duration": 0.038159, "end_time": "2024-03-28T00:08:52.505173", "exception": false, "start_time": "2024-03-28T00:08:52.467014", "status": "completed" }, "tags": [] }, "source": [ "To run this, press \"*Runtime*\" and press \"*Run all*\" on a **free** Tesla T4 Google Colab instance!\n", "
\n", "\n", "To install Unsloth on your own computer, follow the installation instructions on our Github page [here](https://github.com/unslothai/unsloth#installation-instructions---conda).\n", "\n", "You will learn how to do [data prep](#Data), how to [train](#Train), how to [run the model](#Inference), & [how to save it](#Save) (eg for Llama.cpp).\n", "\n", "This notebook uses the `ChatML` format for conversation style finetunes. We use [Open Assistant conversations](https://huggingface.co/datasets/philschmid/guanaco-sharegpt-style) in ShareGPT style." ] }, { "cell_type": "code", "execution_count": 1, "id": "4c970fa0", "metadata": { "execution": { "iopub.execute_input": "2024-03-28T00:08:52.578683Z", "iopub.status.busy": "2024-03-28T00:08:52.577956Z", "iopub.status.idle": "2024-03-28T00:12:44.149130Z", "shell.execute_reply": "2024-03-28T00:12:44.147764Z" }, "id": "2eSvM9zX_2d3", "papermill": { "duration": 231.609576, "end_time": "2024-03-28T00:12:44.151750", "exception": false, "start_time": "2024-03-28T00:08:52.542174", "status": "completed" }, "tags": [] }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Looking in indexes: https://download.pytorch.org/whl/cu121\r\n", "Collecting xformers\r\n", " Downloading https://download.pytorch.org/whl/cu121/xformers-0.0.25-cp310-cp310-manylinux2014_x86_64.whl (222.5 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m222.5/222.5 MB\u001b[0m \u001b[31m7.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hRequirement already satisfied: numpy in /opt/conda/lib/python3.10/site-packages (from xformers) (1.26.4)\r\n", "Collecting torch==2.2.1 (from xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/torch-2.2.1%2Bcu121-cp310-cp310-linux_x86_64.whl (757.3 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m757.3/757.3 MB\u001b[0m \u001b[31m2.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hRequirement already satisfied: filelock in /opt/conda/lib/python3.10/site-packages (from torch==2.2.1->xformers) (3.13.1)\r\n", "Requirement already satisfied: typing-extensions>=4.8.0 in /opt/conda/lib/python3.10/site-packages (from torch==2.2.1->xformers) (4.9.0)\r\n", "Requirement already satisfied: sympy in /opt/conda/lib/python3.10/site-packages (from torch==2.2.1->xformers) (1.12)\r\n", "Requirement already satisfied: networkx in /opt/conda/lib/python3.10/site-packages (from torch==2.2.1->xformers) (3.2.1)\r\n", "Requirement already satisfied: jinja2 in /opt/conda/lib/python3.10/site-packages (from torch==2.2.1->xformers) (3.1.2)\r\n", "Requirement already satisfied: fsspec in /opt/conda/lib/python3.10/site-packages (from torch==2.2.1->xformers) (2024.3.0)\r\n", "Collecting nvidia-cuda-nvrtc-cu12==12.1.105 (from torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (23.7 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m23.7/23.7 MB\u001b[0m \u001b[31m26.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hCollecting nvidia-cuda-runtime-cu12==12.1.105 (from torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (823 kB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m823.6/823.6 kB\u001b[0m \u001b[31m40.2 MB/s\u001b[0m eta 
\u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hCollecting nvidia-cuda-cupti-cu12==12.1.105 (from torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (14.1 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m14.1/14.1 MB\u001b[0m \u001b[31m84.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hCollecting nvidia-cudnn-cu12==8.9.2.26 (from torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl (731.7 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m731.7/731.7 MB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hCollecting nvidia-cublas-cu12==12.1.3.1 (from torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl (410.6 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m410.6/410.6 MB\u001b[0m \u001b[31m1.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hCollecting nvidia-cufft-cu12==11.0.2.54 (from torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl (121.6 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m121.6/121.6 MB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hCollecting nvidia-curand-cu12==10.3.2.106 (from torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl (56.5 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m56.5/56.5 MB\u001b[0m \u001b[31m28.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hCollecting nvidia-cusolver-cu12==11.4.5.107 (from torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl (124.2 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m124.2/124.2 MB\u001b[0m \u001b[31m12.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hCollecting nvidia-cusparse-cu12==12.1.0.106 (from torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl (196.0 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m196.0/196.0 MB\u001b[0m \u001b[31m8.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hCollecting nvidia-nccl-cu12==2.19.3 (from torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/nvidia_nccl_cu12-2.19.3-py3-none-manylinux1_x86_64.whl (166.0 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m166.0/166.0 MB\u001b[0m \u001b[31m9.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hCollecting nvidia-nvtx-cu12==12.1.105 (from torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (99 kB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m99.1/99.1 kB\u001b[0m \u001b[31m6.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hCollecting triton==2.2.0 
(from torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/triton-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (167.9 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m167.9/167.9 MB\u001b[0m \u001b[31m8.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hCollecting nvidia-nvjitlink-cu12 (from nvidia-cusolver-cu12==11.4.5.107->torch==2.2.1->xformers)\r\n", " Downloading https://download.pytorch.org/whl/cu121/nvidia_nvjitlink_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (19.8 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m19.8/19.8 MB\u001b[0m \u001b[31m46.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hRequirement already satisfied: MarkupSafe>=2.0 in /opt/conda/lib/python3.10/site-packages (from jinja2->torch==2.2.1->xformers) (2.1.3)\r\n", "Requirement already satisfied: mpmath>=0.19 in /opt/conda/lib/python3.10/site-packages (from sympy->torch==2.2.1->xformers) (1.3.0)\r\n", "Installing collected packages: triton, nvidia-nvtx-cu12, nvidia-nvjitlink-cu12, nvidia-nccl-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, nvidia-cusparse-cu12, nvidia-cudnn-cu12, nvidia-cusolver-cu12, torch, xformers\r\n", " Attempting uninstall: torch\r\n", " Found existing installation: torch 2.1.2\r\n", " Uninstalling torch-2.1.2:\r\n", " Successfully uninstalled torch-2.1.2\r\n", "Successfully installed nvidia-cublas-cu12-12.1.3.1 nvidia-cuda-cupti-cu12-12.1.105 nvidia-cuda-nvrtc-cu12-12.1.105 nvidia-cuda-runtime-cu12-12.1.105 nvidia-cudnn-cu12-8.9.2.26 nvidia-cufft-cu12-11.0.2.54 nvidia-curand-cu12-10.3.2.106 nvidia-cusolver-cu12-11.4.5.107 nvidia-cusparse-cu12-12.1.0.106 nvidia-nccl-cu12-2.19.3 nvidia-nvjitlink-cu12-12.1.105 nvidia-nvtx-cu12-12.1.105 torch-2.2.1+cu121 triton-2.2.0 xformers-0.0.25\r\n", "Collecting unsloth@ git+https://github.com/unslothai/unsloth.git (from unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git)\r\n", " Cloning https://github.com/unslothai/unsloth.git to /tmp/pip-install-yfpl0t85/unsloth_63990d9a4b8e4d6ca74bbfccdc6198cb\r\n", " Running command git clone --filter=blob:none --quiet https://github.com/unslothai/unsloth.git /tmp/pip-install-yfpl0t85/unsloth_63990d9a4b8e4d6ca74bbfccdc6198cb\r\n", " Resolved https://github.com/unslothai/unsloth.git to commit a68aebc1fa17755ffbcdafc9239e7ca37ab21657\r\n", " Installing build dependencies ... \u001b[?25l-\b \b\\\b \b|\b \b/\b \b-\b \b\\\b \b|\b \b/\b \b-\b \bdone\r\n", "\u001b[?25h Getting requirements to build wheel ... \u001b[?25l-\b \b\\\b \bdone\r\n", "\u001b[?25h Installing backend dependencies ... \u001b[?25l-\b \b\\\b \b|\b \b/\b \bdone\r\n", "\u001b[?25h Preparing metadata (pyproject.toml) ... 
\u001b[?25l-\b \b\\\b \bdone\r\n", "\u001b[?25hRequirement already satisfied: triton in /opt/conda/lib/python3.10/site-packages (2.2.0)\r\n", "Collecting datasets==2.17.1\r\n", " Downloading datasets-2.17.1-py3-none-any.whl.metadata (20 kB)\r\n", "Requirement already satisfied: filelock in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (3.13.1)\r\n", "Requirement already satisfied: numpy>=1.17 in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (1.26.4)\r\n", "Collecting pyarrow>=12.0.0 (from datasets==2.17.1)\r\n", " Downloading pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl.metadata (3.0 kB)\r\n", "Collecting pyarrow-hotfix (from datasets==2.17.1)\r\n", " Downloading pyarrow_hotfix-0.6-py3-none-any.whl.metadata (3.6 kB)\r\n", "Requirement already satisfied: dill<0.3.9,>=0.3.0 in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (0.3.8)\r\n", "Requirement already satisfied: pandas in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (2.1.4)\r\n", "Requirement already satisfied: requests>=2.19.0 in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (2.31.0)\r\n", "Requirement already satisfied: tqdm>=4.62.1 in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (4.66.1)\r\n", "Requirement already satisfied: xxhash in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (3.4.1)\r\n", "Requirement already satisfied: multiprocess in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (0.70.16)\r\n", "Collecting fsspec<=2023.10.0,>=2023.1.0 (from fsspec[http]<=2023.10.0,>=2023.1.0->datasets==2.17.1)\r\n", " Downloading fsspec-2023.10.0-py3-none-any.whl.metadata (6.8 kB)\r\n", "Requirement already satisfied: aiohttp in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (3.9.1)\r\n", "Requirement already satisfied: huggingface-hub>=0.19.4 in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (0.21.4)\r\n", "Requirement already satisfied: packaging in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (21.3)\r\n", "Requirement already satisfied: pyyaml>=5.1 in /opt/conda/lib/python3.10/site-packages (from datasets==2.17.1) (6.0.1)\r\n", "Collecting bitsandbytes (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git)\r\n", " Downloading bitsandbytes-0.43.0-py3-none-manylinux_2_24_x86_64.whl.metadata (1.8 kB)\r\n", "Requirement already satisfied: attrs>=17.3.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets==2.17.1) (23.2.0)\r\n", "Requirement already satisfied: multidict<7.0,>=4.5 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets==2.17.1) (6.0.4)\r\n", "Requirement already satisfied: yarl<2.0,>=1.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets==2.17.1) (1.9.3)\r\n", "Requirement already satisfied: frozenlist>=1.1.1 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets==2.17.1) (1.4.1)\r\n", "Requirement already satisfied: aiosignal>=1.1.2 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets==2.17.1) (1.3.1)\r\n", "Requirement already satisfied: async-timeout<5.0,>=4.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets==2.17.1) (4.0.3)\r\n", "Requirement already satisfied: typing-extensions>=3.7.4.3 in /opt/conda/lib/python3.10/site-packages (from huggingface-hub>=0.19.4->datasets==2.17.1) (4.9.0)\r\n", "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in 
/opt/conda/lib/python3.10/site-packages (from packaging->datasets==2.17.1) (3.1.1)\r\n", "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/conda/lib/python3.10/site-packages (from requests>=2.19.0->datasets==2.17.1) (3.3.2)\r\n", "Requirement already satisfied: idna<4,>=2.5 in /opt/conda/lib/python3.10/site-packages (from requests>=2.19.0->datasets==2.17.1) (3.6)\r\n", "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/conda/lib/python3.10/site-packages (from requests>=2.19.0->datasets==2.17.1) (1.26.18)\r\n", "Requirement already satisfied: certifi>=2017.4.17 in /opt/conda/lib/python3.10/site-packages (from requests>=2.19.0->datasets==2.17.1) (2024.2.2)\r\n", "Requirement already satisfied: torch in /opt/conda/lib/python3.10/site-packages (from bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (2.2.1+cu121)\r\n", "Requirement already satisfied: python-dateutil>=2.8.2 in /opt/conda/lib/python3.10/site-packages (from pandas->datasets==2.17.1) (2.9.0.post0)\r\n", "Requirement already satisfied: pytz>=2020.1 in /opt/conda/lib/python3.10/site-packages (from pandas->datasets==2.17.1) (2023.3.post1)\r\n", "Requirement already satisfied: tzdata>=2022.1 in /opt/conda/lib/python3.10/site-packages (from pandas->datasets==2.17.1) (2023.4)\r\n", "Collecting tyro (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git)\r\n", " Downloading tyro-0.7.3-py3-none-any.whl.metadata (7.7 kB)\r\n", "Requirement already satisfied: transformers>=4.38.2 in /opt/conda/lib/python3.10/site-packages (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (4.38.2)\r\n", "Requirement already satisfied: sentencepiece in /opt/conda/lib/python3.10/site-packages (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.2.0)\r\n", "Requirement already satisfied: psutil in /opt/conda/lib/python3.10/site-packages (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (5.9.3)\r\n", "Requirement already satisfied: wheel>=0.42.0 in /opt/conda/lib/python3.10/site-packages (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.42.0)\r\n", "Requirement already satisfied: accelerate>=0.26.1 in /opt/conda/lib/python3.10/site-packages (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.28.0)\r\n", "Collecting trl>=0.7.9 (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git)\r\n", " Downloading trl-0.8.1-py3-none-any.whl.metadata (11 kB)\r\n", "Collecting peft>=0.7.1 (from unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git)\r\n", " Downloading peft-0.10.0-py3-none-any.whl.metadata (13 kB)\r\n", "Requirement already satisfied: safetensors>=0.3.1 in /opt/conda/lib/python3.10/site-packages (from accelerate>=0.26.1->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.4.2)\r\n", "Requirement already satisfied: six>=1.5 in /opt/conda/lib/python3.10/site-packages (from 
python-dateutil>=2.8.2->pandas->datasets==2.17.1) (1.16.0)\r\n", "Requirement already satisfied: sympy in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (1.12)\r\n", "Requirement already satisfied: networkx in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (3.2.1)\r\n", "Requirement already satisfied: jinja2 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (3.1.2)\r\n", "Requirement already satisfied: nvidia-cuda-nvrtc-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.105)\r\n", "Requirement already satisfied: nvidia-cuda-runtime-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.105)\r\n", "Requirement already satisfied: nvidia-cuda-cupti-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.105)\r\n", "Requirement already satisfied: nvidia-cudnn-cu12==8.9.2.26 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (8.9.2.26)\r\n", "Requirement already satisfied: nvidia-cublas-cu12==12.1.3.1 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.3.1)\r\n", "Requirement already satisfied: nvidia-cufft-cu12==11.0.2.54 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (11.0.2.54)\r\n", "Requirement already satisfied: nvidia-curand-cu12==10.3.2.106 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (10.3.2.106)\r\n", "Requirement already satisfied: nvidia-cusolver-cu12==11.4.5.107 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (11.4.5.107)\r\n", "Requirement already satisfied: nvidia-cusparse-cu12==12.1.0.106 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.0.106)\r\n", "Requirement already satisfied: nvidia-nccl-cu12==2.19.3 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (2.19.3)\r\n", "Requirement already 
satisfied: nvidia-nvtx-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.105)\r\n", "Requirement already satisfied: nvidia-nvjitlink-cu12 in /opt/conda/lib/python3.10/site-packages (from nvidia-cusolver-cu12==11.4.5.107->torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (12.1.105)\r\n", "Requirement already satisfied: regex!=2019.12.17 in /opt/conda/lib/python3.10/site-packages (from transformers>=4.38.2->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (2023.12.25)\r\n", "Requirement already satisfied: tokenizers<0.19,>=0.14 in /opt/conda/lib/python3.10/site-packages (from transformers>=4.38.2->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.15.2)\r\n", "Requirement already satisfied: docstring-parser>=0.14.1 in /opt/conda/lib/python3.10/site-packages (from tyro->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.15)\r\n", "Requirement already satisfied: rich>=11.1.0 in /opt/conda/lib/python3.10/site-packages (from tyro->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (13.7.0)\r\n", "Collecting shtab>=1.5.6 (from tyro->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git)\r\n", " Downloading shtab-1.7.1-py3-none-any.whl.metadata (7.3 kB)\r\n", "Requirement already satisfied: markdown-it-py>=2.2.0 in /opt/conda/lib/python3.10/site-packages (from rich>=11.1.0->tyro->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (3.0.0)\r\n", "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/conda/lib/python3.10/site-packages (from rich>=11.1.0->tyro->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (2.17.2)\r\n", "Requirement already satisfied: MarkupSafe>=2.0 in /opt/conda/lib/python3.10/site-packages (from jinja2->torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (2.1.3)\r\n", "Requirement already satisfied: mpmath>=0.19 in /opt/conda/lib/python3.10/site-packages (from sympy->torch->bitsandbytes->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (1.3.0)\r\n", "Requirement already satisfied: mdurl~=0.1 in /opt/conda/lib/python3.10/site-packages (from markdown-it-py>=2.2.0->rich>=11.1.0->tyro->unsloth@ git+https://github.com/unslothai/unsloth.git->unsloth[kaggle-new]@ git+https://github.com/unslothai/unsloth.git) (0.1.2)\r\n", "Downloading datasets-2.17.1-py3-none-any.whl (536 kB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m536.7/536.7 kB\u001b[0m \u001b[31m4.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hDownloading fsspec-2023.10.0-py3-none-any.whl (166 kB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m166.4/166.4 kB\u001b[0m \u001b[31m10.0 MB/s\u001b[0m eta 
\u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hDownloading pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl (38.3 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m38.3/38.3 MB\u001b[0m \u001b[31m36.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hDownloading bitsandbytes-0.43.0-py3-none-manylinux_2_24_x86_64.whl (102.2 MB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m102.2/102.2 MB\u001b[0m \u001b[31m11.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hDownloading pyarrow_hotfix-0.6-py3-none-any.whl (7.9 kB)\r\n", "Downloading peft-0.10.0-py3-none-any.whl (199 kB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m199.1/199.1 kB\u001b[0m \u001b[31m14.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hDownloading trl-0.8.1-py3-none-any.whl (225 kB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m225.0/225.0 kB\u001b[0m \u001b[31m11.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hDownloading tyro-0.7.3-py3-none-any.whl (79 kB)\r\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m79.8/79.8 kB\u001b[0m \u001b[31m5.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", "\u001b[?25hDownloading shtab-1.7.1-py3-none-any.whl (14 kB)\r\n", "Building wheels for collected packages: unsloth\r\n", " Building wheel for unsloth (pyproject.toml) ... \u001b[?25l-\b \b\\\b \b|\b \bdone\r\n", "\u001b[?25h Created wheel for unsloth: filename=unsloth-2024.3-py3-none-any.whl size=93934 sha256=34861411793a48098b4d9e04f35bc2ce841bfae25a980dd6ce151eecc1321a1a\r\n", " Stored in directory: /tmp/pip-ephem-wheel-cache-6kf3ks_c/wheels/ed/d4/e9/76fb290ee3df0a5fc21ce5c2c788e29e9607a2353d8342fd0d\r\n", "Successfully built unsloth\r\n", "Installing collected packages: unsloth, shtab, pyarrow-hotfix, pyarrow, fsspec, tyro, datasets, bitsandbytes, trl, peft\r\n", " Attempting uninstall: pyarrow\r\n", " Found existing installation: pyarrow 11.0.0\r\n", " Uninstalling pyarrow-11.0.0:\r\n", " Successfully uninstalled pyarrow-11.0.0\r\n", " Attempting uninstall: fsspec\r\n", " Found existing installation: fsspec 2024.3.0\r\n", " Uninstalling fsspec-2024.3.0:\r\n", " Successfully uninstalled fsspec-2024.3.0\r\n", " Attempting uninstall: datasets\r\n", " Found existing installation: datasets 2.1.0\r\n", " Uninstalling datasets-2.1.0:\r\n", " Successfully uninstalled datasets-2.1.0\r\n", "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\r\n", "cudf 23.8.0 requires cubinlinker, which is not installed.\r\n", "cudf 23.8.0 requires cupy-cuda11x>=12.0.0, which is not installed.\r\n", "cudf 23.8.0 requires ptxcompiler, which is not installed.\r\n", "cuml 23.8.0 requires cupy-cuda11x>=12.0.0, which is not installed.\r\n", "dask-cudf 23.8.0 requires cupy-cuda11x>=12.0.0, which is not installed.\r\n", "apache-beam 2.46.0 requires dill<0.3.2,>=0.3.1.1, but you have dill 0.3.8 which is incompatible.\r\n", "apache-beam 2.46.0 requires numpy<1.25.0,>=1.14.3, but you have numpy 1.26.4 which is incompatible.\r\n", "apache-beam 2.46.0 requires pyarrow<10.0.0,>=3.0.0, but you have pyarrow 15.0.2 which is incompatible.\r\n", "beatrix-jupyterlab 2023.128.151533 requires jupyterlab~=3.6.0, but you have jupyterlab 4.1.5 which is incompatible.\r\n", "cudf 23.8.0 requires cuda-python<12.0a0,>=11.7.1, but you have cuda-python 12.4.0 which is incompatible.\r\n", "cudf 23.8.0 requires pandas<1.6.0dev0,>=1.3, but you have pandas 2.1.4 which is incompatible.\r\n", "cudf 23.8.0 requires protobuf<5,>=4.21, but you have protobuf 3.20.3 which is incompatible.\r\n", "cudf 23.8.0 requires pyarrow==11.*, but you have pyarrow 15.0.2 which is incompatible.\r\n", "cuml 23.8.0 requires dask==2023.7.1, but you have dask 2024.3.1 which is incompatible.\r\n", "dask-cuda 23.8.0 requires dask==2023.7.1, but you have dask 2024.3.1 which is incompatible.\r\n", "dask-cuda 23.8.0 requires pandas<1.6.0dev0,>=1.3, but you have pandas 2.1.4 which is incompatible.\r\n", "dask-cudf 23.8.0 requires dask==2023.7.1, but you have dask 2024.3.1 which is incompatible.\r\n", "dask-cudf 23.8.0 requires pandas<1.6.0dev0,>=1.3, but you have pandas 2.1.4 which is incompatible.\r\n", "distributed 2023.7.1 requires dask==2023.7.1, but you have dask 2024.3.1 which is incompatible.\r\n", "gcsfs 2023.12.2.post1 requires fsspec==2023.12.2, but you have fsspec 2023.10.0 which is incompatible.\r\n", "raft-dask 23.8.0 requires dask==2023.7.1, but you have dask 2024.3.1 which is incompatible.\r\n", "s3fs 2024.3.0 requires fsspec==2024.3.0, but you have fsspec 2023.10.0 which is incompatible.\u001b[0m\u001b[31m\r\n", "\u001b[0mSuccessfully installed bitsandbytes-0.43.0 datasets-2.17.1 fsspec-2023.10.0 peft-0.10.0 pyarrow-15.0.2 pyarrow-hotfix-0.6 shtab-1.7.1 trl-0.8.1 tyro-0.7.3 unsloth-2024.3\r\n" ] } ], "source": [ "#%%capture\n", "#import torch\n", "#major_version, minor_version = torch.cuda.get_device_capability()\n", "\n", "!pip install -U xformers --index-url https://download.pytorch.org/whl/cu121\n", "!pip install \"unsloth[kaggle-new] @ git+https://github.com/unslothai/unsloth.git\" triton datasets==2.17.1\n", "#if major_version >= 8:\n", "# # Use this for new GPUs like Ampere, Hopper GPUs (RTX 30xx, RTX 40xx, A100, H100, L40)\n", "# !pip install --no-deps packaging ninja einops flash-attn xformers trl peft accelerate bitsandbytes\n", "#else:\n", "# # Use this for older GPUs (V100, Tesla T4, RTX 20xx)\n", "# !pip install --no-deps xformers trl peft accelerate bitsandbytes\n", "\n", "import os\n", "os.environ[\"WANDB_DISABLED\"] = \"true\"" ] }, { "cell_type": "markdown", "id": "c963a9d2", "metadata": { "id": "r2v_X2fA0Df5", "papermill": { "duration": 0.123192, "end_time": "2024-03-28T00:12:44.398766", "exception": false, "start_time": "2024-03-28T00:12:44.275574", "status": "completed" }, "tags": [] }, "source": [ "* We support Llama, Mistral, CodeLlama, TinyLlama, Vicuna, Open Hermes etc\n", "* And Yi, Qwen 
([llamafied](https://huggingface.co/models?sort=trending&search=qwen+llama)), Deepseek, and all Llama- and Mistral-derived architectures.\n", "* We support 16bit LoRA or 4bit QLoRA. Both are 2x faster.\n", "* `max_seq_length` can be set to anything, since we do automatic RoPE Scaling via [kaiokendev's](https://kaiokendev.github.io/til) method.\n", "* With [PR 26037](https://github.com/huggingface/transformers/pull/26037), we support downloading 4bit models **4x faster**! [Our repo](https://huggingface.co/unsloth) has Llama, Mistral 4bit models.\n", "* [**NEW**] We make Gemma (trained on 6 trillion tokens) **2.5x faster**! See our [Gemma notebook](https://colab.research.google.com/drive/10NbwlsRChbma1v55m8LAPYG15uQv6HLo?usp=sharing)" ] }, { "cell_type": "code", "execution_count": 2, "id": "88a40779", "metadata": { "execution": { "iopub.execute_input": "2024-03-28T00:12:44.654372Z", "iopub.status.busy": "2024-03-28T00:12:44.653667Z", "iopub.status.idle": "2024-03-28T00:13:38.483179Z", "shell.execute_reply": "2024-03-28T00:13:38.482338Z" }, "id": "QmUBVEnvCDJv", "outputId": "40383ec5-b379-4fcd-ba5c-b5656b0ff129", "papermill": { "duration": 53.95967, "end_time": "2024-03-28T00:13:38.485764", "exception": false, "start_time": "2024-03-28T00:12:44.526094", "status": "completed" }, "tags": [] }, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "58648d0ab785418089b24914c46df7a4", "version_major": 2, "version_minor": 0 }, "text/plain": [ "config.json: 0%| | 0.00/1.05k [00:00, ?B/s]" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stdout", "output_type": "stream", "text": [ "==((====))== Unsloth: Fast Mistral patching release 2024.3\n", " \\\\ /| GPU: Tesla T4. Max memory: 14.748 GB. Platform = Linux.\n", "O^O/ \\_/ \\ Pytorch: 2.2.1+cu121. CUDA = 7.5. CUDA Toolkit = 12.1.\n", "\\ / Bfloat16 = FALSE. Xformers = 0.0.25. 
FA = False.\n", " \"-____-\" Free Apache license: http://github.com/unslothai/unsloth\n" ] }, { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "0291f4d1f4734954946a71afef1e1519", "version_major": 2, "version_minor": 0 }, "text/plain": [ "model.safetensors: 0%| | 0.00/4.13G [00:00, ?B/s]" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "944f3bb3fc0f4ef594424d1ed90f391b", "version_major": 2, "version_minor": 0 }, "text/plain": [ "generation_config.json: 0%| | 0.00/116 [00:00, ?B/s]" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "9aeb5170934f4e7da9748e8859e40e30", "version_major": 2, "version_minor": 0 }, "text/plain": [ "tokenizer_config.json: 0%| | 0.00/971 [00:00, ?B/s]" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "92da558bad0e4330a2b4b3041ed24aad", "version_major": 2, "version_minor": 0 }, "text/plain": [ "tokenizer.model: 0%| | 0.00/493k [00:00, ?B/s]" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "59233cb5bb5849d0800cde9d3c129184", "version_major": 2, "version_minor": 0 }, "text/plain": [ "special_tokens_map.json: 0%| | 0.00/438 [00:00, ?B/s]" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "84af480c5f6c4ad490074ef103af5628", "version_major": 2, "version_minor": 0 }, "text/plain": [ "tokenizer.json: 0%| | 0.00/1.80M [00:00, ?B/s]" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stderr", "output_type": "stream", "text": [ "2024-03-28 00:13:27.566798: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", "2024-03-28 00:13:27.566934: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", "2024-03-28 00:13:27.741422: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n" ] } ], "source": [ "from unsloth import FastLanguageModel\n", "import torch\n", "max_seq_length = 2048 # Choose any! We auto support RoPE Scaling internally!\n", "dtype = None # None for auto detection. Float16 for Tesla T4, V100, Bfloat16 for Ampere+\n", "load_in_4bit = True # Use 4bit quantization to reduce memory usage. Can be False.\n", "\n", "# 4bit pre quantized models we support for 4x faster downloading + no OOMs.\n", "fourbit_models = [\n", " \"unsloth/mistral-7b-bnb-4bit\",\n", " \"unsloth/mistral-7b-instruct-v0.2-bnb-4bit\",\n", " \"unsloth/llama-2-7b-bnb-4bit\",\n", " \"unsloth/llama-2-13b-bnb-4bit\",\n", " \"unsloth/codellama-34b-bnb-4bit\",\n", " \"unsloth/tinyllama-bnb-4bit\",\n", " \"unsloth/gemma-7b-bnb-4bit\", # New Google 6 trillion tokens model 2.5x faster!\n", " \"unsloth/gemma-2b-bnb-4bit\",\n", "] # More models at https://huggingface.co/unsloth\n", "\n", "model, tokenizer = FastLanguageModel.from_pretrained(\n", " model_name = \"unsloth/mistral-7b-bnb-4bit\", # Choose ANY! 
eg teknium/OpenHermes-2.5-Mistral-7B\n", " max_seq_length = max_seq_length,\n", " dtype = dtype,\n", " load_in_4bit = load_in_4bit,\n", " # token = \"hf_...\", # use one if using gated models like meta-llama/Llama-2-7b-hf\n", ")" ] }, { "cell_type": "markdown", "id": "ca908244", "metadata": { "id": "SXd9bTZd1aaL", "papermill": { "duration": 0.12735, "end_time": "2024-03-28T00:13:38.741441", "exception": false, "start_time": "2024-03-28T00:13:38.614091", "status": "completed" }, "tags": [] }, "source": [ "We now add LoRA adapters so we only need to update 1 to 10% of all parameters!" ] }, { "cell_type": "code", "execution_count": 3, "id": "9a50c1ab", "metadata": { "execution": { "iopub.execute_input": "2024-03-28T00:13:38.993225Z", "iopub.status.busy": "2024-03-28T00:13:38.992478Z", "iopub.status.idle": "2024-03-28T00:13:39.865675Z", "shell.execute_reply": "2024-03-28T00:13:39.864586Z" }, "id": "6bZsfBuZDeCL", "outputId": "4c986b9b-ee42-48d6-ba35-6a709e919c82", "papermill": { "duration": 1.001126, "end_time": "2024-03-28T00:13:39.869351", "exception": false, "start_time": "2024-03-28T00:13:38.868225", "status": "completed" }, "tags": [] }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Unsloth 2024.3 patched 32 layers with 32 QKV layers, 32 O layers and 32 MLP layers.\n" ] } ], "source": [ "model = FastLanguageModel.get_peft_model(\n", " model,\n", " r = 16, # Choose any number > 0! Suggested 8, 16, 32, 64, 128\n", " target_modules = [\"q_proj\", \"k_proj\", \"v_proj\", \"o_proj\",\n", " \"gate_proj\", \"up_proj\", \"down_proj\",],\n", " lora_alpha = 16,\n", " lora_dropout = 0, # Supports any, but = 0 is optimized\n", " bias = \"none\", # Supports any, but = \"none\" is optimized\n", " use_gradient_checkpointing = True,\n", " random_state = 3407,\n", " use_rslora = False, # We support rank stabilized LoRA\n", " loftq_config = None, # And LoftQ\n", ")" ] }, { "cell_type": "markdown", "id": "0b7c4848", "metadata": { "id": "vITh0KVJ10qX", "papermill": { "duration": 0.124172, "end_time": "2024-03-28T00:13:40.129776", "exception": false, "start_time": "2024-03-28T00:13:40.005604", "status": "completed" }, "tags": [] }, "source": [ "\n", "### Data Prep\n", "We now use the `ChatML` format for conversation style finetunes. We use [Open Assistant conversations](https://huggingface.co/datasets/philschmid/guanaco-sharegpt-style) in ShareGPT style. ChatML renders multi-turn conversations like below:\n", "\n", "```\n", "<|im_start|>system\n", "You are a helpful assistant.<|im_end|>\n", "<|im_start|>user\n", "What's the capital of France?<|im_end|>\n", "<|im_start|>assistant\n", "Paris.<|im_end|>\n", "```\n", "\n", "**[NOTE]** To train only on completions (ignoring the user's input), read TRL's docs [here](https://huggingface.co/docs/trl/sft_trainer#train-on-completions-only).\n", "\n", "We use our `get_chat_template` function to get the correct chat template. We support `zephyr, chatml, mistral, llama, alpaca, vicuna, vicuna_old` and our own optimized `unsloth` template.\n", "\n", "Normally one has to train the new `<|im_start|>` and `<|im_end|>` tokens. We instead map `<|im_end|>` to the EOS token and leave `<|im_start|>` as is, so no additional tokens need to be trained.\n", "\n", "Note ShareGPT uses `{\"from\": \"human\", \"value\" : \"Hi\"}` and not `{\"role\": \"user\", \"content\" : \"Hi\"}`, so we use `mapping` to map between the two (a small example is shown at the end of this cell).\n", "\n", "For text completions like novel writing, try this [notebook](https://colab.research.google.com/drive/1ef-tab5bhkvWmBOObepl1WgJvfvSzn5Q?usp=sharing)."
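, "\n", "As a small illustration of that mapping (a hypothetical conversation, not taken from the dataset), a ShareGPT-style record such as\n", "\n", "```python\n", "convo = [\n", "    {\"from\": \"human\", \"value\": \"Hi!\"},\n", "    {\"from\": \"gpt\", \"value\": \"Hello! How can I help?\"},\n", "]\n", "```\n", "\n", "is rendered by `tokenizer.apply_chat_template(convo, tokenize = False)` roughly as\n", "\n", "```\n", "<|im_start|>user\n", "Hi!<|im_end|>\n", "<|im_start|>assistant\n", "Hello! How can I help?<|im_end|>\n", "```\n", "\n", "where `<|im_end|>` is emitted as the EOS token `</s>` since we set `map_eos_token = True`."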
] }, { "cell_type": "code", "execution_count": 4, "id": "0d33d99d", "metadata": { "execution": { "iopub.execute_input": "2024-03-28T00:13:40.375057Z", "iopub.status.busy": "2024-03-28T00:13:40.374718Z", "iopub.status.idle": "2024-03-28T00:13:41.633504Z", "shell.execute_reply": "2024-03-28T00:13:41.632366Z" }, "id": "LjY75GoYUCB8", "outputId": "50c7b539-b750-4964-fa4a-45a99d5923f1", "papermill": { "duration": 1.382761, "end_time": "2024-03-28T00:13:41.635817", "exception": false, "start_time": "2024-03-28T00:13:40.253056", "status": "completed" }, "tags": [] }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Unsloth: Will map <|im_end|> to EOS = .\n" ] } ], "source": [ "from datasets import load_dataset\n", "import json\n", "from unsloth.chat_templates import get_chat_template\n", "\n", "tokenizer = get_chat_template(\n", " tokenizer,\n", " chat_template = \"chatml\", # Supports zephyr, chatml, mistral, llama, alpaca, vicuna, vicuna_old, unsloth\n", " #mapping = {\"role\" : \"from\", \"content\" : \"value\", \"user\" : \"human\", \"assistant\" : \"gpt\"}, # ShareGPT style\n", " map_eos_token = True, # Maps <|im_end|> to instead\n", ")\n", "\n", "def formatting_prompts_func(convos):\n", " texts = [tokenizer.apply_chat_template(convo, tokenize = False, add_generation_prompt = False) for convo in convos]\n", " return { \"text\" : texts, }\n", "\n", "with open(\"/kaggle/input/the-group-chat/output-10k-c.json\") as chatfile:\n", " convos = [json.loads(j) for j in chatfile.readlines()]\n", "\n", "dataset = formatting_prompts_func(convos)" ] }, { "cell_type": "markdown", "id": "f75a3f33", "metadata": { "id": "cHiVoToneynS", "papermill": { "duration": 0.127199, "end_time": "2024-03-28T00:13:41.890438", "exception": false, "start_time": "2024-03-28T00:13:41.763239", "status": "completed" }, "tags": [] }, "source": [ "Let's see how the `ChatML` format works by printing the 5th element" ] }, { "cell_type": "code", "execution_count": 5, "id": "08ef098f", "metadata": { "execution": { "iopub.execute_input": "2024-03-28T00:13:42.144988Z", "iopub.status.busy": "2024-03-28T00:13:42.144281Z", "iopub.status.idle": "2024-03-28T00:13:42.148878Z", "shell.execute_reply": "2024-03-28T00:13:42.147833Z" }, "id": "U5iEWrUkevpE", "outputId": "e28b6889-29f9-400f-a08c-5fc7d5cbc5db", "papermill": { "duration": 0.133687, "end_time": "2024-03-28T00:13:42.150735", "exception": false, "start_time": "2024-03-28T00:13:42.017048", "status": "completed" }, "tags": [] }, "outputs": [], "source": [ "#dataset[5][\"conversations\"]\n", "#print(dataset[\"text\"])" ] }, { "cell_type": "markdown", "id": "a77a6d20", "metadata": { "id": "GuKOAUDpUeDL", "papermill": { "duration": 0.121878, "end_time": "2024-03-28T00:13:42.399195", "exception": false, "start_time": "2024-03-28T00:13:42.277317", "status": "completed" }, "tags": [] }, "source": [ "If you're looking to make your own chat template, that also is possible! You must use the Jinja templating regime. 
We provide our own stripped-down version of the `Unsloth template`, which we find more efficient; it leverages the ChatML, Zephyr and Alpaca styles.\n", "\n", "More info on chat templates is on [our wiki page](https://github.com/unslothai/unsloth/wiki#chat-templates)!" ] }, { "cell_type": "code", "execution_count": 6, "id": "cdd24991", "metadata": { "execution": { "iopub.execute_input": "2024-03-28T00:13:42.653294Z", "iopub.status.busy": "2024-03-28T00:13:42.652894Z", "iopub.status.idle": "2024-03-28T00:13:42.658835Z", "shell.execute_reply": "2024-03-28T00:13:42.657902Z" }, "id": "p31Z-S6FUieB", "papermill": { "duration": 0.136303, "end_time": "2024-03-28T00:13:42.660931", "exception": false, "start_time": "2024-03-28T00:13:42.524628", "status": "completed" }, "tags": [] }, "outputs": [], "source": [ "unsloth_template = \\\n", " \"{{ bos_token }}\"\\\n", " \"{{ 'You are a helpful assistant to the user\\n' }}\"\\\n", " \"{% for message in messages %}\"\\\n", " \"{% if message['role'] == 'user' %}\"\\\n", " \"{{ '>>> User: ' + message['content'] + '\\n' }}\"\\\n", " \"{% elif message['role'] == 'assistant' %}\"\\\n", " \"{{ '>>> Assistant: ' + message['content'] + eos_token + '\\n' }}\"\\\n", " \"{% endif %}\"\\\n", " \"{% endfor %}\"\\\n", " \"{% if add_generation_prompt %}\"\\\n", " \"{{ '>>> Assistant: ' }}\"\\\n", " \"{% endif %}\"\n", "unsloth_eos_token = \"eos_token\"\n", "\n", "if False:\n", " tokenizer = get_chat_template(\n", " tokenizer,\n", " chat_template = (unsloth_template, unsloth_eos_token,), # You must provide a template and EOS token\n", " mapping = {\"role\" : \"from\", \"content\" : \"value\", \"user\" : \"human\", \"assistant\" : \"gpt\"}, # ShareGPT style\n", " map_eos_token = True, # Maps <|im_end|> to </s> instead\n", " )" ] }, { "cell_type": "markdown", "id": "44e5c271", "metadata": { "id": "idAEIeSQ3xdS", "papermill": { "duration": 0.127599, "end_time": "2024-03-28T00:13:42.915115", "exception": false, "start_time": "2024-03-28T00:13:42.787516", "status": "completed" }, "tags": [] }, "source": [ "\n", "### Train the model\n", "Now let's use Hugging Face TRL's `SFTTrainer`! More docs here: [TRL SFT docs](https://huggingface.co/docs/trl/sft_trainer). This notebook does one full pass over the data with `num_train_epochs = 1`; to speed things up for a quick test, set `max_steps = 60` and remove `num_train_epochs` instead. We also support TRL's `DPOTrainer`!"
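, "\n", "As a quick sanity check on the step count (simple arithmetic from this run's own numbers: 10,000 examples, per-device batch size 2, gradient accumulation 4):\n", "\n", "```python\n", "# Effective batch size = per-device batch size * gradient accumulation steps.\n", "effective_batch_size = 2 * 4                      # 8 sequences per optimizer step\n", "steps_per_epoch = 10_000 // effective_batch_size  # 1250 steps, matching the trainer log below\n", "```"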
] }, { "cell_type": "code", "execution_count": 7, "id": "84d94e51", "metadata": { "execution": { "iopub.execute_input": "2024-03-28T00:13:43.163495Z", "iopub.status.busy": "2024-03-28T00:13:43.162623Z", "iopub.status.idle": "2024-03-28T00:13:43.243458Z", "shell.execute_reply": "2024-03-28T00:13:43.242622Z" }, "papermill": { "duration": 0.20747, "end_time": "2024-03-28T00:13:43.245965", "exception": false, "start_time": "2024-03-28T00:13:43.038495", "status": "completed" }, "tags": [] }, "outputs": [], "source": [ "from datasets import Dataset\n", "dataset = Dataset.from_dict(dataset)" ] }, { "cell_type": "code", "execution_count": 8, "id": "099afa9e", "metadata": { "execution": { "iopub.execute_input": "2024-03-28T00:13:43.492984Z", "iopub.status.busy": "2024-03-28T00:13:43.492622Z", "iopub.status.idle": "2024-03-28T00:13:48.324291Z", "shell.execute_reply": "2024-03-28T00:13:48.323307Z" }, "id": "95_Nn-89DhsL", "outputId": "c13d3e90-5342-4535-9541-98f9120dfe2b", "papermill": { "duration": 4.95752, "end_time": "2024-03-28T00:13:48.326701", "exception": false, "start_time": "2024-03-28T00:13:43.369181", "status": "completed" }, "tags": [] }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Using the `WANDB_DISABLED` environment variable is deprecated and will be removed in v5. Use the --report_to flag to control the integrations used for logging result (for instance --report_to none).\n" ] }, { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "e363d483a5134f5d873c11f936d2d9f5", "version_major": 2, "version_minor": 0 }, "text/plain": [ "Map (num_proc=2): 0%| | 0/10000 [00:00, ? examples/s]" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "from trl import SFTTrainer\n", "from transformers import TrainingArguments\n", "\n", "trainer = SFTTrainer(\n", " model = model,\n", " tokenizer = tokenizer,\n", " train_dataset = dataset,\n", " dataset_text_field = \"text\",\n", " max_seq_length = max_seq_length,\n", " dataset_num_proc = 2,\n", " packing = False, # Can make training 5x faster for short sequences.\n", " args = TrainingArguments(\n", " per_device_train_batch_size = 2,\n", " gradient_accumulation_steps = 4,\n", " warmup_steps = 5,\n", " num_train_epochs=1,\n", " learning_rate = 2e-4,\n", " fp16 = not torch.cuda.is_bf16_supported(),\n", " bf16 = torch.cuda.is_bf16_supported(),\n", " logging_steps = 1,\n", " optim = \"adamw_8bit\",\n", " weight_decay = 0.01,\n", " lr_scheduler_type = \"linear\",\n", " seed = 3407,\n", " output_dir = \"outputs\",\n", " ),\n", ")" ] }, { "cell_type": "code", "execution_count": 9, "id": "56281856", "metadata": { "cellView": "form", "execution": { "iopub.execute_input": "2024-03-28T00:13:48.575758Z", "iopub.status.busy": "2024-03-28T00:13:48.575334Z", "iopub.status.idle": "2024-03-28T00:13:48.582620Z", "shell.execute_reply": "2024-03-28T00:13:48.581689Z" }, "id": "2ejIt2xSNKKp", "outputId": "a537db02-e673-44da-8889-5fa95a5e2d51", "papermill": { "duration": 0.137429, "end_time": "2024-03-28T00:13:48.585471", "exception": false, "start_time": "2024-03-28T00:13:48.448042", "status": "completed" }, "tags": [] }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "GPU = Tesla T4. 
Max memory = 14.748 GB.\n", "4.5 GB of memory reserved.\n" ] } ], "source": [ "#@title Show current memory stats\n", "gpu_stats = torch.cuda.get_device_properties(0)\n", "start_gpu_memory = round(torch.cuda.max_memory_reserved() / 1024 / 1024 / 1024, 3)\n", "max_memory = round(gpu_stats.total_memory / 1024 / 1024 / 1024, 3)\n", "print(f\"GPU = {gpu_stats.name}. Max memory = {max_memory} GB.\")\n", "print(f\"{start_gpu_memory} GB of memory reserved.\")" ] }, { "cell_type": "code", "execution_count": 10, "id": "a4e1702c", "metadata": { "execution": { "iopub.execute_input": "2024-03-28T00:13:48.854943Z", "iopub.status.busy": "2024-03-28T00:13:48.854292Z", "iopub.status.idle": "2024-03-28T03:52:49.428064Z", "shell.execute_reply": "2024-03-28T03:52:49.427099Z" }, "id": "yqxqAZ7KJ4oL", "outputId": "db7bae40-bf0a-4908-8867-a5dfe933e1f3", "papermill": { "duration": 13140.716117, "end_time": "2024-03-28T03:52:49.430510", "exception": false, "start_time": "2024-03-28T00:13:48.714393", "status": "completed" }, "tags": [] }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "==((====))== Unsloth - 2x faster free finetuning | Num GPUs = 1\n", " \\\\ /| Num examples = 10,000 | Num Epochs = 1\n", "O^O/ \\_/ \\ Batch size per device = 2 | Gradient Accumulation steps = 4\n", "\\ / Total batch size = 8 | Total steps = 1,250\n", " \"-____-\" Number of trainable parameters = 41,943,040\n" ] }, { "data": { "text/html": [ "\n", "Step | \n", "Training Loss | \n", "
---|---|
1 | \n", "2.415600 | \n", "
2 | \n", "2.560600 | \n", "
3 | \n", "2.358100 | \n", "
4 | \n", "2.018800 | \n", "
5 | \n", "1.869800 | \n", "
6 | \n", "1.859900 | \n", "
7 | \n", "1.855700 | \n", "
8 | \n", "1.985000 | \n", "
9 | \n", "1.739100 | \n", "
10 | \n", "1.857900 | \n", "
11 | \n", "1.858300 | \n", "
12 | \n", "1.574900 | \n", "
13 | \n", "1.680000 | \n", "
14 | \n", "1.615100 | \n", "
15 | \n", "1.720000 | \n", "
16 | \n", "1.731600 | \n", "
17 | \n", "1.727100 | \n", "
18 | \n", "1.587100 | \n", "
19 | \n", "1.579300 | \n", "
20 | \n", "1.642300 | \n", "
21 | \n", "1.487200 | \n", "
22 | \n", "1.585400 | \n", "
23 | \n", "1.611900 | \n", "
24 | \n", "1.598700 | \n", "
25 | \n", "1.617600 | \n", "
26 | \n", "1.511700 | \n", "
27 | \n", "1.805500 | \n", "
28 | \n", "1.569000 | \n", "
29 | \n", "1.652700 | \n", "
30 | \n", "1.421700 | \n", "
31 | \n", "1.666500 | \n", "
32 | \n", "1.633400 | \n", "
33 | \n", "1.630900 | \n", "
34 | \n", "1.744100 | \n", "
35 | \n", "1.577500 | \n", "
36 | \n", "1.665400 | \n", "
37 | \n", "1.569500 | \n", "
38 | \n", "1.597500 | \n", "
39 | \n", "1.703800 | \n", "
40 | \n", "1.556500 | \n", "
41 | \n", "1.451800 | \n", "
42 | \n", "1.629500 | \n", "
43 | \n", "1.538500 | \n", "
44 | \n", "1.508600 | \n", "
45 | \n", "1.439400 | \n", "
46 | \n", "1.590000 | \n", "
47 | \n", "1.568200 | \n", "
48 | \n", "1.554900 | \n", "
49 | \n", "1.486900 | \n", "
50 | \n", "1.617100 | \n", "
51 | \n", "1.695700 | \n", "
52 | \n", "1.470600 | \n", "
53 | \n", "1.680400 | \n", "
54 | \n", "1.605500 | \n", "
55 | \n", "1.472900 | \n", "
56 | \n", "1.636600 | \n", "
57 | \n", "1.527600 | \n", "
58 | \n", "1.579300 | \n", "
59 | \n", "1.551700 | \n", "
60 | \n", "1.503900 | \n", "
61 | \n", "1.364500 | \n", "
62 | \n", "1.575300 | \n", "
63 | \n", "1.516700 | \n", "
64 | \n", "1.632000 | \n", "
65 | \n", "1.430900 | \n", "
66 | \n", "1.542000 | \n", "
67 | \n", "1.609800 | \n", "
68 | \n", "1.647700 | \n", "
69 | \n", "1.478100 | \n", "
70 | \n", "1.328200 | \n", "
71 | \n", "1.725000 | \n", "
72 | \n", "1.522400 | \n", "
73 | \n", "1.557200 | \n", "
74 | \n", "1.670000 | \n", "
75 | \n", "1.648900 | \n", "
76 | \n", "1.670400 | \n", "
77 | \n", "1.615300 | \n", "
78 | \n", "1.541800 | \n", "
79 | \n", "1.549200 | \n", "
80 | \n", "1.544500 | \n", "
81 | \n", "1.423300 | \n", "
82 | \n", "1.300900 | \n", "
83 | \n", "1.626600 | \n", "
84 | \n", "1.585000 | \n", "
85 | \n", "1.444500 | \n", "
86 | \n", "1.598200 | \n", "
87 | \n", "1.541000 | \n", "
88 | \n", "1.429500 | \n", "
89 | \n", "1.517300 | \n", "
90 | \n", "1.539100 | \n", "
91 | \n", "1.604200 | \n", "
92 | \n", "1.504300 | \n", "
93 | \n", "1.520200 | \n", "
94 | \n", "1.459000 | \n", "
95 | \n", "1.619900 | \n", "
96 | \n", "1.629000 | \n", "
97 | \n", "1.507000 | \n", "
98 | \n", "1.455300 | \n", "
99 | \n", "1.461700 | \n", "
100 | \n", "1.513500 | \n", "
101 | \n", "1.521500 | \n", "
102 | \n", "1.658100 | \n", "
103 | \n", "1.579500 | \n", "
104 | \n", "1.430100 | \n", "
105 | \n", "1.591500 | \n", "
106 | \n", "1.620900 | \n", "
107 | \n", "1.681300 | \n", "
108 | \n", "1.662900 | \n", "
109 | \n", "1.717200 | \n", "
110 | \n", "1.656000 | \n", "
111 | \n", "1.545400 | \n", "
112 | \n", "1.434400 | \n", "
113 | \n", "1.665900 | \n", "
114 | \n", "1.483000 | \n", "
115 | \n", "1.411300 | \n", "
116 | \n", "1.549000 | \n", "
117 | \n", "1.627200 | \n", "
118 | \n", "1.608600 | \n", "
119 | \n", "1.549700 | \n", "
120 | \n", "1.560800 | \n", "
121 | \n", "1.581400 | \n", "
122 | \n", "1.586100 | \n", "
123 | \n", "1.442700 | \n", "
124 | \n", "1.666800 | \n", "
125 | \n", "1.563900 | \n", "
126 | \n", "1.550300 | \n", "
127 | \n", "1.475600 | \n", "
... | \n", "... | \n", "
1250 | \n", "1.504800 | \n", "
"
],
"text/plain": [
"