diff --git a/train_unsloth.ipynb b/train_unsloth.ipynb
index ddcffeb..1dae9a2 100644
--- a/train_unsloth.ipynb
+++ b/train_unsloth.ipynb
@@ -1 +1,15868 @@
-{"metadata":{"accelerator":"GPU","colab":{"provenance":[],"gpuType":"T4"},"kernelspec":{"name":"python3","display_name":"Python 3","language":"python"},"language_info":{"name":"python","version":"3.10.13","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"},"widgets":{"application/vnd.jupyter.widget-state+json":{ ... }}, ... }
[removed line truncated: the elided "widgets" block is auto-saved ipywidgets state (HBox, HTML, FloatProgress, Layout, ProgressStyle, and DescriptionStyle models) recording Hugging Face Hub download progress bars for config.json (1.05 kB), model.safetensors (4.13 GB at 225 MB/s), generation_config.json (116 B), tokenizer_config.json (971 B), tokenizer.model (493 kB), tokenizer.json (1.80 MB), special_tokens_map.json (438 B), and a dataset readme (11.6 kB)]
4d6f9a540d6d71f29898","IPY_MODEL_0fda3d9d81764040b701f44c022d3a97"],"layout":"IPY_MODEL_a629a4f39f904d918cfb1940f2756cb1"}},"75da1ca588cd427083ebcc98af74ad45":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_25b8bf991b234f30af21842b423e8c7d","placeholder":"","style":"IPY_MODEL_dd1377f789ad4439b6bf99843fe4ca9a","value":"Downloading data: 100%"}},"a0e64b31b0744d6f9a540d6d71f29898":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_92a3e0edfd034150b1ef3f07f21957c5","max":44307561,"min":0,"orientation":"horizontal","style":"IPY_MODEL_caf3a07d739144cca5a246be40ed1d2c","value":44307561}},"0fda3d9d81764040b701f44c022d3a97":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_3f4d8437a14d42f1adb2314b8f090f91","placeholder":"","style":"IPY_MODEL_de5acf3da485445d9955603111bc9bb1","value":" 44.3M/44.3M [00:01<00:00, 
31.4MB/s]"}},"a629a4f39f904d918cfb1940f2756cb1":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"25b8bf991b234f30af21842b423e8c7d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"dd1377f789ad4439b6bf99843fe4ca9a":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"92a3e0edfd034150b1ef3f07f21957c5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"caf3a07d739144cca5
a246be40ed1d2c":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"3f4d8437a14d42f1adb2314b8f090f91":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"de5acf3da485445d9955603111bc9bb1":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"49606713136e4b4f809e88edec0ee31f":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_41a9ff9396454511ac9ec7cb8cd73999","IPY_MODEL_90f3e5054101499a9d623edd73a58aaf","IPY_MODEL_8d2a2929aed1450f909f67a82efb072c"],"layout":"IPY_MODEL_00daf2c57f724c5b8581554ca2f2b2c1"}},"41a9ff9396454511ac9ec7cb8cd73999":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_221f374f77914569ad4cf0464e8c183e","placeholder":"","style":"IPY_MODEL_f52c132e71ce4f919abcff5a7df10a52","value":"Generating train split: 
"}},"90f3e5054101499a9d623edd73a58aaf":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_45e6fb6a4f144daab3f859cadabff196","max":1,"min":0,"orientation":"horizontal","style":"IPY_MODEL_fdcf4a1a700f4688ab19d55af784cf42","value":1}},"8d2a2929aed1450f909f67a82efb072c":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_f02cb5a79f404c3db58a86e010d929b5","placeholder":"","style":"IPY_MODEL_64763f108b13475d85f44f4eb1f4a7f7","value":" 51760/0 [00:01<00:00, 42051.37 examples/s]"}},"00daf2c57f724c5b8581554ca2f2b2c1":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"221f374f77914569ad4cf0464e8c183e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"f52c132e71ce4f919abcff5a7df10a52":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model
_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"45e6fb6a4f144daab3f859cadabff196":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":"20px"}},"fdcf4a1a700f4688ab19d55af784cf42":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"f02cb5a79f404c3db58a86e010d929b5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"64763f108b13475d85f44f4eb1f4a7f7":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"add655fe6f304c5b8c7f5c374f73fe84":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_76de4b26f31b45f9a8eb31413badd4d0","IPY_MODEL_4c31d1803e134e
d38a67c9cd61097a28","IPY_MODEL_3edd0e9e876f4429be155e3378922f30"],"layout":"IPY_MODEL_b1e594f36f3c4751b648abcbb74c28cd"}},"76de4b26f31b45f9a8eb31413badd4d0":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_304eef6608044f45ae0e78c4ce32233a","placeholder":"","style":"IPY_MODEL_fb2e02192e1f4972bc75441d217168e1","value":"Map: 100%"}},"4c31d1803e134ed38a67c9cd61097a28":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_92d2d40d99304e18b11bcb01093ad510","max":51760,"min":0,"orientation":"horizontal","style":"IPY_MODEL_4dc2fb7510824a78bd12f58fc2594a89","value":51760}},"3edd0e9e876f4429be155e3378922f30":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_8b01eafd97dd48cfb86952d0946aa856","placeholder":"","style":"IPY_MODEL_0ba86603e45d473f89ce572c6874f2fe","value":" 51760/51760 [00:01<00:00, 41134.95 
examples/s]"}},"b1e594f36f3c4751b648abcbb74c28cd":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"304eef6608044f45ae0e78c4ce32233a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fb2e02192e1f4972bc75441d217168e1":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"92d2d40d99304e18b11bcb01093ad510":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4dc2fb7510824a78
bd12f58fc2594a89":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"8b01eafd97dd48cfb86952d0946aa856":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"0ba86603e45d473f89ce572c6874f2fe":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"28408ab4afb7494f8cb2f834458f411c":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_b85ef639d1f54fb3ac8902fb66be79eb","IPY_MODEL_0c266c0637064fbe8d548d0876c6b315","IPY_MODEL_aa315e0b196d4b80a114690a689d5fbf"],"layout":"IPY_MODEL_5b6f098adcb84717b162152ddaadfa0b"}},"b85ef639d1f54fb3ac8902fb66be79eb":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_caaf2d5a555f41fab3b74f3096375ba9","placeholder":"","style":"IPY_MODEL_1789d926e02549b19e4aca59f44f320a","value":"Map (num_proc=2): 
100%"}},"0c266c0637064fbe8d548d0876c6b315":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_3816c6f816784e5ca11a458bbf790031","max":51760,"min":0,"orientation":"horizontal","style":"IPY_MODEL_954d6540772d40a99f642c2524db3a0c","value":51760}},"aa315e0b196d4b80a114690a689d5fbf":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_fdc5073e7cc24edc8c5b920a363b4a40","placeholder":"","style":"IPY_MODEL_f69d18a0b745434d8d078939e3a0c7fc","value":" 51760/51760 [00:42<00:00, 1876.91 examples/s]"}},"5b6f098adcb84717b162152ddaadfa0b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"caaf2d5a555f41fab3b74f3096375ba9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"1789d926e02549b19e4aca59f44f320a":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":
"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"3816c6f816784e5ca11a458bbf790031":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"954d6540772d40a99f642c2524db3a0c":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"fdc5073e7cc24edc8c5b920a363b4a40":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"f69d18a0b745434d8d078939e3a0c7fc":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}}}},"kaggle":{"accelerator":"nvidiaTeslaT4","dataSources":[{"sourceId":8511152,"sourceType":"datasetVersion","datasetId":5080560},{"sourceId":8534734,"sourceType":"datasetVersion","datasetId":4675483}],"isInternetEnabled":true,"language":"python","sourceType":"notebook","isGpuEnabled":true}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"markdown","source":"To run this, press \"*Runtime*\" and press \"*Run all*\" on a **free** Tesla T4 Google Colab 
instance!\n
\n\nTo install Unsloth on your own computer, follow the installation instructions on our Github page [here](https://github.com/unslothai/unsloth#installation-instructions---conda).\n\nYou will learn how to do [data prep](#Data), how to [train](#Train), how to [run the model](#Inference), & [how to save it](#Save) (eg for Llama.cpp).","metadata":{"id":"IqM-T1RTzY6C"}},{"cell_type":"markdown","source":"## Kaggle is slow - you'll have to wait **5 minutes** for it to install.\n\nI suggest you use our free Colab notebooks instead. I linked our Mistral Colab notebook here: [notebook](https://colab.research.google.com/drive/1Dyauq4kTZoLewQ1cApceUQVNcnnNTzg_?usp=sharing)","metadata":{}},{"cell_type":"code","source":"%%capture\n!pip install -U \"xformers<0.0.26\" --index-url https://download.pytorch.org/whl/cu121\n!pip install \"unsloth[kaggle-new] @ git+https://github.com/unslothai/unsloth.git\"\n\n# Temporary fix for https://github.com/huggingface/datasets/issues/6753\n!pip install datasets==2.16.0 fsspec==2023.10.0 gcsfs==2023.10.0\n\nimport os\nos.environ[\"WANDB_DISABLED\"] = \"true\"","metadata":{"execution":{"iopub.status.busy":"2024-05-25T04:02:01.128438Z","iopub.execute_input":"2024-05-25T04:02:01.128773Z","iopub.status.idle":"2024-05-25T04:05:55.463554Z","shell.execute_reply.started":"2024-05-25T04:02:01.128749Z","shell.execute_reply":"2024-05-25T04:05:55.462209Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":"* We support Llama, Mistral, CodeLlama, TinyLlama, Vicuna, Open Hermes etc\n* And Yi, Qwen ([llamafied](https://huggingface.co/models?sort=trending&search=qwen+llama)), Deepseek, all Llama, Mistral derived archs.\n* We support 16bit LoRA or 4bit QLoRA. Both 2x faster.\n* `max_seq_length` can be set to anything, since we do automatic RoPE Scaling via [kaiokendev's](https://kaiokendev.github.io/til) method.\n* [**NEW**] With [PR 26037](https://github.com/huggingface/transformers/pull/26037), we support downloading 4bit models **4x faster**! [Our repo](https://huggingface.co/unsloth) has Llama, Mistral 4bit models.","metadata":{"id":"r2v_X2fA0Df5"}},{"cell_type":"code","source":"from unsloth import FastLanguageModel\nimport torch\nmax_seq_length = 2048 # Choose any! We auto support RoPE Scaling internally!\ndtype = None # None for auto detection. Float16 for Tesla T4, V100, Bfloat16 for Ampere+\nload_in_4bit = True # Use 4bit quantization to reduce memory usage. Can be False.\n\n# 4bit pre quantized models we support for 4x faster downloading + no OOMs.\nfourbit_models = [\n \"unsloth/mistral-7b-bnb-4bit\",\n \"unsloth/mistral-7b-instruct-v0.2-bnb-4bit\",\n \"unsloth/llama-2-7b-bnb-4bit\",\n \"unsloth/llama-2-13b-bnb-4bit\",\n \"unsloth/codellama-34b-bnb-4bit\",\n \"unsloth/tinyllama-bnb-4bit\",\n \"unsloth/llama-3-8b-bnb-4bit\",\n \"unsloth/llama-3-70b-bnb-4bit\",\n] # More models at https://huggingface.co/unsloth\n\nmodel, tokenizer = FastLanguageModel.from_pretrained(\n model_name = \"Orenguteng/Llama-3-8B-Lexi-Uncensored\", # Choose ANY! 
eg teknium/OpenHermes-2.5-Mistral-7B\n max_seq_length = max_seq_length,\n dtype = dtype,\n load_in_4bit = load_in_4bit,\n use_gradient_checkpointing = \"unsloth\", # We cut memory usage by a further 30% and now support fine-tuning of LLMs with 4x longer context windows!\n # token = \"hf_...\", # use one if using gated models like meta-llama/Llama-2-7b-hf\n)","metadata":{"id":"QmUBVEnvCDJv","outputId":"5eff0d61-05b4-471c-eea2-c2e84a915109","execution":{"iopub.status.busy":"2024-05-25T04:06:55.008762Z","iopub.execute_input":"2024-05-25T04:06:55.009117Z","iopub.status.idle":"2024-05-25T04:07:35.338067Z","shell.execute_reply.started":"2024-05-25T04:06:55.009090Z","shell.execute_reply":"2024-05-25T04:07:35.337098Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":"We now add LoRA adapters so we only need to update 1 to 10% of all parameters!","metadata":{"id":"SXd9bTZd1aaL"}},{"cell_type":"code","source":"model = FastLanguageModel.get_peft_model(\n model,\n r = 32, # Choose any number > 0! Suggested 8, 16, 32, 64, 128\n target_modules = [\"q_proj\", \"k_proj\", \"v_proj\", \"o_proj\",\n \"gate_proj\", \"up_proj\", \"down_proj\",],\n lora_alpha = 16,\n lora_dropout = 0, # Supports any, but = 0 is optimized\n bias = \"none\", # Supports any, but = \"none\" is optimized\n use_gradient_checkpointing = \"unsloth\", # 4x longer contexts auto supported!\n random_state = 3407,\n use_rslora = False, # We support rank stabilized LoRA\n loftq_config = None, # And LoftQ\n)","metadata":{"id":"6bZsfBuZDeCL","outputId":"b630cc80-ff95-45a2-cc0d-38666010d73b","execution":{"iopub.status.busy":"2024-05-25T04:23:33.920458Z","iopub.execute_input":"2024-05-25T04:23:33.920865Z","iopub.status.idle":"2024-05-25T04:23:34.015573Z","shell.execute_reply.started":"2024-05-25T04:23:33.920836Z","shell.execute_reply":"2024-05-25T04:23:34.014490Z"},"trusted":true},"execution_count":null,"outputs":[]},
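As a quick sanity check that the LoRA wrapping really did freeze most of the network, something like the following can be run after the cell above (an illustrative sketch, not part of the original notebook; `model` is the PEFT-wrapped model it returns):

```python
# Illustrative check (not in the original notebook): confirm that LoRA froze
# most of the network. With r = 32 on an 8B-parameter model the trainable
# share should land in the low single-digit percent range.
trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
total = sum(p.numel() for p in model.parameters())
print(f"trainable: {trainable:,} / {total:,} ({100 * trainable / total:.2f}%)")
```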
{"cell_type":"markdown","source":"\n### Data Prep\nThis notebook originally used the Alpaca dataset from [yahma](https://huggingface.co/datasets/yahma/alpaca-cleaned), a filtered version of the original 52K [Alpaca dataset](https://crfm.stanford.edu/2023/03/13/alpaca.html); here we instead load two local JSON chat datasets and format them with the llama-3 chat template. You can replace this code section with your own data prep.\n\n**[NOTE]** To train only on completions (ignoring the user's input) read TRL's docs [here](https://huggingface.co/docs/trl/sft_trainer#train-on-completions-only).\n\n**[NOTE]** Remember to add the **EOS_TOKEN** to the tokenized output!! Otherwise you'll get infinite generations!\n\nIf you want to use the `ChatML` template for ShareGPT datasets, try our conversational [notebook](https://colab.research.google.com/drive/1Aau3lgPzeZKQ-98h69CCu1UJcvIBLmy2?usp=sharing).\n\nFor text completions like novel writing, try this [notebook](https://colab.research.google.com/drive/1ef-tab5bhkvWmBOObepl1WgJvfvSzn5Q?usp=sharing).","metadata":{"id":"vITh0KVJ10qX"}},{"cell_type":"code","source":"import json\nfrom unsloth.chat_templates import get_chat_template\n\ntokenizer = get_chat_template(\n tokenizer,\n chat_template = \"llama-3\", # Supports zephyr, chatml, mistral, llama, alpaca, vicuna, vicuna_old, unsloth\n #mapping = {\"role\" : \"from\", \"content\" : \"value\", \"user\" : \"human\", \"assistant\" : \"gpt\"}, # ShareGPT style\n map_eos_token = True, # Maps <|im_end|> to the EOS token instead\n)\n\ndef formatting_prompts_func(convos):\n texts = [tokenizer.apply_chat_template(convo, tokenize = False, add_generation_prompt = False) for convo in convos]\n return { \"text\" : texts, }\n\nwith open(\"/kaggle/input/the-group-chat/output-10k-c-dropout.json\") as chatfile:\n convos = [json.loads(j) for j in chatfile.readlines()]\n\nwith open(\"/kaggle/input/toxicqa/toxicQAfinal.json\") as chatfile:\n convos += [json.loads(j) for j in chatfile.readlines()]\n\ndataset = formatting_prompts_func(convos)","metadata":{"id":"LjY75GoYUCB8","outputId":"9f40f734-788c-4793-c1af-e9d003337612","execution":{"iopub.status.busy":"2024-05-25T04:28:11.710969Z","iopub.execute_input":"2024-05-25T04:28:11.711971Z","iopub.status.idle":"2024-05-25T04:28:13.097432Z","shell.execute_reply.started":"2024-05-25T04:28:11.711936Z","shell.execute_reply":"2024-05-25T04:28:13.096601Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"from datasets import Dataset\ndataset = Dataset.from_dict(dataset)","metadata":{},"execution_count":null,"outputs":[]},
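Because a missing EOS token silently breaks generation later, it is worth eyeballing one formatted example before training. A minimal sketch, assuming the `dataset` and `tokenizer` built above (this check is not part of the original notebook):

```python
# Illustrative sanity check: a formatted example should end with the
# template's EOS token, or generation at inference time may never stop.
sample = dataset[0]["text"]
print(sample[-300:])  # eyeball the rendered llama-3 template
if not sample.rstrip().endswith(tokenizer.eos_token):
    print(f"warning: sample does not end with {tokenizer.eos_token!r} - check the template")
```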
{"cell_type":"markdown","source":"\n### Train the model\nNow let's use Huggingface TRL's `SFTTrainer`! More docs here: [TRL SFT docs](https://huggingface.co/docs/trl/sft_trainer). Here we train for one full epoch with `num_train_epochs = 1`; for a quick test run, set `max_steps = 60` instead. We also support TRL's `DPOTrainer`!","metadata":{"id":"idAEIeSQ3xdS"}},{"cell_type":"code","source":"from trl import SFTTrainer\nfrom transformers import TrainingArguments\n\ntrainer = SFTTrainer(\n model = model,\n tokenizer = tokenizer,\n train_dataset = dataset,\n dataset_text_field = \"text\",\n max_seq_length = max_seq_length,\n dataset_num_proc = 2,\n packing = False, # Set True to pack short sequences together, which can make training 5x faster.\n args = TrainingArguments(\n per_device_train_batch_size = 2,\n gradient_accumulation_steps = 4,\n warmup_steps = 5,\n num_train_epochs = 1,\n learning_rate = 2e-4,\n fp16 = not torch.cuda.is_bf16_supported(),\n bf16 = torch.cuda.is_bf16_supported(),\n logging_steps = 1,\n optim = \"adamw_8bit\",\n weight_decay = 0.01,\n lr_scheduler_type = \"linear\",\n seed = 3407,\n output_dir = \"outputs\",\n report_to = \"none\",\n ),\n)","metadata":{"id":"95_Nn-89DhsL","outputId":"4b809e6d-271f-446f-dec8-abe0d13259f8","execution":{"iopub.status.busy":"2024-05-25T04:28:27.973142Z","iopub.execute_input":"2024-05-25T04:28:27.973856Z","iopub.status.idle":"2024-05-25T04:28:28.119131Z","shell.execute_reply.started":"2024-05-25T04:28:27.973822Z","shell.execute_reply":"2024-05-25T04:28:28.117976Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"#@title Show current memory stats\ngpu_stats = torch.cuda.get_device_properties(0)\nstart_gpu_memory = round(torch.cuda.max_memory_reserved() / 1024 / 1024 / 1024, 3)\nmax_memory = round(gpu_stats.total_memory / 1024 / 1024 / 1024, 3)\nprint(f\"GPU = {gpu_stats.name}. Max memory = {max_memory} GB.\")\nprint(f\"{start_gpu_memory} GB of memory reserved.\")","metadata":{"id":"2ejIt2xSNKKp","cellView":"form","outputId":"4815a050-0c0f-4a6a-9d93-b01c44eaea35","execution":{"iopub.status.busy":"2024-04-06T16:21:16.730485Z","iopub.execute_input":"2024-04-06T16:21:16.730782Z","iopub.status.idle":"2024-04-06T16:21:16.737279Z","shell.execute_reply.started":"2024-04-06T16:21:16.730754Z","shell.execute_reply":"2024-04-06T16:21:16.736403Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"trainer_stats = trainer.train()","metadata":{"id":"yqxqAZ7KJ4oL","outputId":"3cf26aac-6042-4458-c4a6-d8849efb6a95","execution":{"iopub.status.busy":"2024-04-06T16:21:16.738651Z","iopub.execute_input":"2024-04-06T16:21:16.739026Z","iopub.status.idle":"2024-04-06T16:30:10.783093Z","shell.execute_reply.started":"2024-04-06T16:21:16.738993Z","shell.execute_reply":"2024-04-06T16:30:10.782238Z"},"trusted":true},"execution_count":null,"outputs":[]},
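For orientation, the optimizer step count implied by this configuration can be computed by hand; the 51,760 figure below is the combined dataset size shown in the progress bars above (a back-of-the-envelope sketch, not part of the original notebook):

```python
# Effective batch size = per-device batch size * gradient accumulation steps.
examples = 51_760
effective_batch_size = 2 * 4  # per_device_train_batch_size * gradient_accumulation_steps
print(examples // effective_batch_size)  # -> 6470 optimizer steps per epoch
```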
%.\")","metadata":{"id":"pCqnaKmlO1U9","cellView":"form","outputId":"cf63d152-e152-468c-ba0d-938e0d2f71a0","execution":{"iopub.status.busy":"2024-04-06T16:30:10.784435Z","iopub.execute_input":"2024-04-06T16:30:10.7848Z","iopub.status.idle":"2024-04-06T16:30:10.791887Z","shell.execute_reply.started":"2024-04-06T16:30:10.784767Z","shell.execute_reply":"2024-04-06T16:30:10.791092Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":"\n### Inference\nLet's run the model! You can change the instruction and input - leave the output blank!","metadata":{"id":"ekOmTR1hSNcr"}},{"cell_type":"code","source":"if False:\n # alpaca_prompt = Copied from above\n FastLanguageModel.for_inference(model) # Enable native 2x faster inference\n inputs = tokenizer(\n [\n alpaca_prompt.format(\n \"Continue the fibonnaci sequence.\", # instruction\n \"1, 1, 2, 3, 5, 8\", # input\n \"\", # output - leave this blank for generation!\n )\n ], return_tensors = \"pt\").to(\"cuda\")\n\n outputs = model.generate(**inputs, max_new_tokens = 64, use_cache = True)\n tokenizer.batch_decode(outputs)","metadata":{"id":"kR3gIAX-SM2q","outputId":"5b71f982-38c0-44c8-a4e5-58cd20b5a585","execution":{"iopub.status.busy":"2024-04-06T16:30:10.793045Z","iopub.execute_input":"2024-04-06T16:30:10.793321Z","iopub.status.idle":"2024-04-06T16:30:13.837651Z","shell.execute_reply.started":"2024-04-06T16:30:10.793298Z","shell.execute_reply":"2024-04-06T16:30:13.836679Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":" You can also use a `TextStreamer` for continuous inference - so you can see the generation token by token, instead of waiting the whole time!","metadata":{"id":"CrSvZObor0lY"}},{"cell_type":"code","source":"if False:\n # alpaca_prompt = Copied from above\n FastLanguageModel.for_inference(model) # Enable native 2x faster inference\n inputs = tokenizer(\n [\n alpaca_prompt.format(\n \"Continue the fibonnaci sequence.\", # instruction\n \"1, 1, 2, 3, 5, 8\", # input\n \"\", # output - leave this blank for generation!\n )\n ], return_tensors = \"pt\").to(\"cuda\")\n\n from transformers import TextStreamer\n text_streamer = TextStreamer(tokenizer)\n _ = model.generate(**inputs, streamer = text_streamer, max_new_tokens = 128)","metadata":{"id":"e2pEuRb1r2Vg","outputId":"084aab62-2122-436a-c0cb-8871986640eb","execution":{"iopub.status.busy":"2024-04-06T16:30:13.840849Z","iopub.execute_input":"2024-04-06T16:30:13.841138Z","iopub.status.idle":"2024-04-06T16:30:15.541954Z","shell.execute_reply.started":"2024-04-06T16:30:13.841114Z","shell.execute_reply":"2024-04-06T16:30:15.54076Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":"\n### Saving, loading finetuned models\nTo save the final model as LoRA adapters, either use Huggingface's `push_to_hub` for an online save or `save_pretrained` for a local save.\n\n**[NOTE]** This ONLY saves the LoRA adapters, and not the full model. 
{"cell_type":"markdown","source":"\n### Saving, loading finetuned models\nTo save the final model as LoRA adapters, either use Huggingface's `push_to_hub` for an online save or `save_pretrained` for a local save.\n\n**[NOTE]** This ONLY saves the LoRA adapters, and not the full model. To save to 16bit or GGUF, scroll down!","metadata":{"id":"uMuVrWbjAzhc"}},{"cell_type":"code","source":"#model.save_pretrained(\"lora_model\") # Local saving\nmodel.push_to_hub(\"scoliono/groupchat_lora_lexi_8b\", token = \"hf_...\") # NEVER hardcode a real token in a committed notebook - see below","metadata":{"id":"upcOlWe7A1vc","execution":{"iopub.status.busy":"2024-04-06T16:30:15.543701Z","iopub.execute_input":"2024-04-06T16:30:15.544355Z","iopub.status.idle":"2024-04-06T16:30:16.234142Z","shell.execute_reply.started":"2024-04-06T16:30:15.544315Z","shell.execute_reply":"2024-04-06T16:30:16.233363Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":"Now if you want to load the LoRA adapters we just saved for inference, change `False` to `True`:","metadata":{"id":"AEEcJ4qfC7Lp"}},{"cell_type":"code","source":"if False:\n from unsloth import FastLanguageModel\n model, tokenizer = FastLanguageModel.from_pretrained(\n model_name = \"scoliono/groupchat_lora_instruct\", # YOUR MODEL YOU USED FOR TRAINING\n max_seq_length = max_seq_length,\n dtype = dtype,\n load_in_4bit = load_in_4bit,\n )\n FastLanguageModel.for_inference(model) # Enable native 2x faster inference\n\n # alpaca_prompt = You MUST copy from above!\n\n inputs = tokenizer(\n [\n alpaca_prompt.format(\n \"What is a famous tall tower in Paris?\", # instruction\n \"\", # input\n \"\", # output - leave this blank for generation!\n )\n ], return_tensors = \"pt\").to(\"cuda\")\n\n outputs = model.generate(**inputs, max_new_tokens = 64, use_cache = True)\n tokenizer.batch_decode(outputs)","metadata":{"id":"MKX_XKs_BNZR","outputId":"05e5a193-dab0-41db-e07c-4b3afbdd7932","execution":{"iopub.status.busy":"2024-04-06T16:30:16.235412Z","iopub.execute_input":"2024-04-06T16:30:16.236127Z","iopub.status.idle":"2024-04-06T16:30:20.286318Z","shell.execute_reply.started":"2024-04-06T16:30:16.236092Z","shell.execute_reply":"2024-04-06T16:30:20.285241Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":"You can also use Hugging Face's `AutoModelForPeftCausalLM`. Only use this if you do not have `unsloth` installed. It can be hopelessly slow, since `4bit` model downloading is not supported, and Unsloth's **inference is 2x faster**.","metadata":{"id":"QQMjaNrjsU5_"}},{"cell_type":"code","source":"if False:\n # NOT recommended - use Unsloth if possible\n from peft import AutoPeftModelForCausalLM\n from transformers import AutoTokenizer\n model = AutoPeftModelForCausalLM.from_pretrained(\n \"lora_model\", # YOUR MODEL YOU USED FOR TRAINING\n load_in_4bit = load_in_4bit,\n )\n tokenizer = AutoTokenizer.from_pretrained(\"lora_model\")","metadata":{"id":"yFfaXG0WsQuE","execution":{"iopub.status.busy":"2024-04-06T16:30:20.289045Z","iopub.execute_input":"2024-04-06T16:30:20.289914Z","iopub.status.idle":"2024-04-06T16:30:20.294953Z","shell.execute_reply.started":"2024-04-06T16:30:20.289877Z","shell.execute_reply":"2024-04-06T16:30:20.293978Z"},"trusted":true},"execution_count":null,"outputs":[]},
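On the `token =` argument in the `push_to_hub` cell above: the original notebook hardcoded a real Hugging Face token, which must be treated as leaked and revoked once committed. A safer, generic pattern (a sketch, not Unsloth-specific; `HF_TOKEN` is just a conventional environment variable name, and Kaggle Secrets work equally well):

```python
import os

# Read the Hugging Face token from the environment instead of pasting it
# into a cell that gets committed with the notebook.
model.push_to_hub("scoliono/groupchat_lora_lexi_8b", token = os.environ["HF_TOKEN"])
```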
{"cell_type":"markdown","source":"### Saving to float16 for VLLM\n\nWe also support saving to `float16` directly. Select `merged_16bit` for float16 or `merged_4bit` for int4. We also allow `lora` adapters as a fallback. Use `push_to_hub_merged` to upload to your Hugging Face account! You can go to https://huggingface.co/settings/tokens for your personal tokens.","metadata":{"id":"f422JgM9sdVT"}},{"cell_type":"code","source":"# Merge to 16bit\nif False: model.save_pretrained_merged(\"model\", tokenizer, save_method = \"merged_16bit\",)\nif False: model.push_to_hub_merged(\"hf/model\", tokenizer, save_method = \"merged_16bit\", token = \"\")\n\n# Merge to 4bit\nif False: model.save_pretrained_merged(\"model\", tokenizer, save_method = \"merged_4bit\",)\nif False: model.push_to_hub_merged(\"hf/model\", tokenizer, save_method = \"merged_4bit\", token = \"\")\n\n# Just LoRA adapters\nif False: model.save_pretrained_merged(\"model\", tokenizer, save_method = \"lora\",)\nif False: model.push_to_hub_merged(\"hf/model\", tokenizer, save_method = \"lora\", token = \"\")","metadata":{"id":"iHjt_SMYsd3P","execution":{"iopub.status.busy":"2024-04-06T16:30:20.295979Z","iopub.execute_input":"2024-04-06T16:30:20.296285Z","iopub.status.idle":"2024-04-06T16:30:20.308979Z","shell.execute_reply.started":"2024-04-06T16:30:20.29626Z","shell.execute_reply":"2024-04-06T16:30:20.308167Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":"### GGUF / llama.cpp Conversion\nWe now support saving to `GGUF` / `llama.cpp` natively! We clone `llama.cpp` and default to saving as `q8_0`, but all methods like `q4_k_m` are allowed. Use `save_pretrained_gguf` for local saving and `push_to_hub_gguf` for uploading to HF.\n\nSome supported quant methods (full list on our [Wiki page](https://github.com/unslothai/unsloth/wiki#gguf-quantization-options)):\n* `q8_0` - Fast conversion. High resource use, but generally acceptable.\n* `q4_k_m` - Recommended. Uses Q6_K for half of the attention.wv and feed_forward.w2 tensors, else Q4_K.\n* `q5_k_m` - Recommended. Uses Q6_K for half of the attention.wv and feed_forward.w2 tensors, else Q5_K.","metadata":{"id":"TCv4vXHd61i7"}},{"cell_type":"code","source":"# Save to 8bit Q8_0\nif False: model.save_pretrained_gguf(\"model\", tokenizer,)\nif False: model.push_to_hub_gguf(\"hf/model\", tokenizer, token = \"\")\n\n# Save to 16bit GGUF\nif False: model.save_pretrained_gguf(\"model\", tokenizer, quantization_method = \"f16\")\nif False: model.push_to_hub_gguf(\"hf/model\", tokenizer, quantization_method = \"f16\", token = \"\")\n\n# Save to q4_k_m GGUF\nif False: model.save_pretrained_gguf(\"model\", tokenizer, quantization_method = \"q4_k_m\")\nif False: model.push_to_hub_gguf(\"hf/model\", tokenizer, quantization_method = \"q4_k_m\", token = \"\")","metadata":{"id":"FqfebeAdT073","execution":{"iopub.status.busy":"2024-04-06T16:30:20.310103Z","iopub.execute_input":"2024-04-06T16:30:20.310443Z","iopub.status.idle":"2024-04-06T16:30:20.324421Z","shell.execute_reply.started":"2024-04-06T16:30:20.310419Z","shell.execute_reply":"2024-04-06T16:30:20.323668Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":"Now, use the `model-unsloth.gguf` file or `model-unsloth-Q4_K_M.gguf` file in `llama.cpp` or a UI based system like `GPT4All`. You can install GPT4All by going [here](https://gpt4all.io/index.html).","metadata":{"id":"bDp0zNpwe6U_"}},
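To sanity-check an exported file outside of Unsloth, the `llama-cpp-python` bindings can load it directly (an illustrative sketch; the exact filename depends on the quantization method chosen above):

```python
from llama_cpp import Llama  # pip install llama-cpp-python

# Load the q4_k_m export produced by save_pretrained_gguf above and run a
# short completion as a smoke test.
llm = Llama(model_path = "model-unsloth-Q4_K_M.gguf", n_ctx = 2048)
out = llm("Hello! Who are you?", max_tokens = 64)
print(out["choices"][0]["text"])
```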
{"cell_type":"markdown","source":"And we're done! If you have any questions on Unsloth, we have a [Discord](https://discord.gg/u54VK8m8tk) channel! If you find any bugs, want to keep up with the latest LLM news, or need help joining projects, feel free to join our Discord!\n\nSome other links:\n1. Zephyr DPO 2x faster [free Colab](https://colab.research.google.com/drive/15vttTpzzVXv_tJwEk-hIcQ0S9FcEWvwP?usp=sharing)\n2. Llama 7b 2x faster [free Colab](https://colab.research.google.com/drive/1lBzz5KeZJKXjvivbYvmGarix9Ao6Wxe5?usp=sharing)\n3. TinyLlama 4x faster full Alpaca 52K in 1 hour [free Colab](https://colab.research.google.com/drive/1AZghoNBQaMDgWJpi4RbffGM1h6raLUj9?usp=sharing)\n4. CodeLlama 34b 2x faster [A100 on Colab](https://colab.research.google.com/drive/1y7A0AxE3y8gdj4AVkl2aZX47Xu3P1wJT?usp=sharing)\n5. Mistral 7b [free Kaggle version](https://www.kaggle.com/code/danielhanchen/kaggle-mistral-7b-unsloth-notebook)\n6. We also did a [blog](https://huggingface.co/blog/unsloth-trl) with 🤗 HuggingFace, and we're in the TRL [docs](https://huggingface.co/docs/trl/main/en/sft_trainer#accelerate-fine-tuning-2x-using-unsloth)!\n7. `ChatML` for ShareGPT datasets, [conversational notebook](https://colab.research.google.com/drive/1Aau3lgPzeZKQ-98h69CCu1UJcvIBLmy2?usp=sharing)\n8. Text completions like novel writing [notebook](https://colab.research.google.com/drive/1ef-tab5bhkvWmBOObepl1WgJvfvSzn5Q?usp=sharing)\n\n","metadata":{"id":"Zt9CHJqO6p30"}}]} \ No newline at end of file +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0ff91594", + "metadata": { + "id": "IqM-T1RTzY6C", + "papermill": { + "duration": 0.022416, + "end_time": "2024-11-19T19:01:59.936783", + "exception": false, + "start_time": "2024-11-19T19:01:59.914367", + "status": "completed" + }, + "tags": [] + }, + "source": [ + "To run this, press \"*Runtime*\" and press \"*Run all*\" on a **free** Tesla T4 Google Colab instance!\n", + "\n", + "\n", + "To install Unsloth on your own computer, follow the installation instructions on our Github page [here](https://github.com/unslothai/unsloth#installation-instructions---conda).\n", + "\n", + "You will learn how to do [data prep](#Data), how to [train](#Train), how to [run the model](#Inference), & [how to save it](#Save) (eg for Llama.cpp)." + ] + }, + { + "cell_type": "markdown", + "id": "9f31fd0e", + "metadata": { + "papermill": { + "duration": 0.01882, + "end_time": "2024-11-19T19:01:59.975791", + "exception": false, + "start_time": "2024-11-19T19:01:59.956971", + "status": "completed" + }, + "tags": [] + }, + "source": [ + "## Kaggle is slow - you'll have to wait **5 minutes** for it to install.\n", + "\n", + "I suggest you use our free Colab notebooks instead. 
I linked our Mistral Colab notebook here: [notebook](https://colab.research.google.com/drive/1Dyauq4kTZoLewQ1cApceUQVNcnnNTzg_?usp=sharing)" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "5da70b6b", + "metadata": { + "execution": { + "iopub.execute_input": "2024-11-19T19:02:00.014824Z", + "iopub.status.busy": "2024-11-19T19:02:00.014491Z", + "iopub.status.idle": "2024-11-19T19:06:21.486688Z", + "shell.execute_reply": "2024-11-19T19:06:21.485746Z" + }, + "papermill": { + "duration": 261.495285, + "end_time": "2024-11-19T19:06:21.489744", + "exception": false, + "start_time": "2024-11-19T19:01:59.994459", + "status": "completed" + }, + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Collecting pip3-autoremove\r\n", + " Downloading pip3_autoremove-1.2.2-py2.py3-none-any.whl.metadata (2.2 kB)\r\n", + "Requirement already satisfied: pip in /opt/conda/lib/python3.10/site-packages (from pip3-autoremove) (24.0)\r\n", + "Requirement already satisfied: setuptools in /opt/conda/lib/python3.10/site-packages (from pip3-autoremove) (70.0.0)\r\n", + "Downloading pip3_autoremove-1.2.2-py2.py3-none-any.whl (6.7 kB)\r\n", + "Installing collected packages: pip3-autoremove\r\n", + "Successfully installed pip3-autoremove-1.2.2\r\n", + "dill 0.3.8 is installed but dill<0.3.2,>=0.3.1.1 is required\r\n", + "Redoing requirement with just package name...\r\n", + "cloudpickle 3.0.0 is installed but cloudpickle~=2.2.1 is required\r\n", + "Redoing requirement with just package name...\r\n", + "numpy 1.26.4 is installed but numpy<1.25.0,>=1.14.3 is required\r\n", + "Redoing requirement with just package name...\r\n", + "pyarrow 16.1.0 is installed but pyarrow<10.0.0,>=3.0.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "jupyterlab 4.2.5 is installed but jupyterlab~=3.6.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "google-cloud-bigquery 2.34.4 is installed but google-cloud-bigquery[bqstorage,pandas]>=3.10.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "google-cloud-storage 1.44.0 is installed but google-cloud-storage>=2.0.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "pandas 2.2.2 is installed but pandas<2.1.4,>=1.5.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "botocore 1.35.23 is installed but botocore<1.30.0,>=1.29.100 is required\r\n", + "Redoing requirement with just package name...\r\n", + "numpy 1.26.4 is installed but numpy<3.0,>=2.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "google-api-python-client 2.147.0 is installed but google-api-python-client==1.8.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "packaging 21.3 is installed but packaging>=23.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "The 'cubinlinker' distribution was not found and is required by the application\r\n", + "Skipping cubinlinker\r\n", + "cuda-python 12.6.0 is installed but cuda-python<12.0a0,>=11.7.1 is required\r\n", + "Redoing requirement with just package name...\r\n", + "The 'cupy-cuda11x>=12.0.0' distribution was not found and is required by the application\r\n", + "Skipping cupy-cuda11x\r\n", + "The 'ptxcompiler' distribution was not found and is required by the application\r\n", + "Skipping ptxcompiler\r\n", + "The 'cupy-cuda11x>=12.0.0' distribution was not found and is required by the application\r\n", + "Skipping 
cupy-cuda11x\r\n", + "The 'cupy-cuda11x>=12.0.0' distribution was not found and is required by the application\r\n", + "Skipping cupy-cuda11x\r\n", + "pydantic 2.9.2 is installed but pydantic~=1.10.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "dask 2024.9.1 is installed but dask==2024.7.1 is required\r\n", + "Redoing requirement with just package name...\r\n", + "The 'google.auth>=1.14.1' distribution was not found and is required by the application\r\n", + "Skipping google.auth\r\n", + "scipy 1.14.1 is installed but scipy<1.14.0,>=1.7.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "google-api-core 2.11.1 is installed but google-api-core[grpc]<2.0.0dev,>=1.22.2 is required\r\n", + "Redoing requirement with just package name...\r\n", + "google-api-core 2.11.1 is installed but google-api-core[grpc]<2.0.0dev,>=1.14.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "pyarrow 16.1.0 is installed but pyarrow<15,>=2 is required\r\n", + "Redoing requirement with just package name...\r\n", + "jupyter-lsp 1.5.1 is installed but jupyter-lsp>=2.0.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "jupyter-lsp 1.5.1 is installed but jupyter-lsp>=2.0.0 is required\r\n", + "Redoing requirement with just package name...\r\n", + "google-cloud-storage 1.44.0 is installed but google-cloud-storage<3,>=2.2.1 is required\r\n", + "Redoing requirement with just package name...\r\n", + "packaging 21.3 is installed but packaging>=22 is required\r\n", + "Redoing requirement with just package name...\r\n", + "Shapely 1.8.5.post1 is installed but shapely>=2.0.1 is required\r\n", + "Redoing requirement with just package name...\r\n", + "dill 0.3.8 is installed but dill>=0.3.9 is required\r\n", + "Redoing requirement with just package name...\r\n", + "multiprocess 0.70.16 is installed but multiprocess>=0.70.17 is required\r\n", + "Redoing requirement with just package name...\r\n", + "packaging 21.3 is installed but packaging>=23.2 is required\r\n", + "Redoing requirement with just package name...\r\n", + "dask 2024.9.1 is installed but dask==2024.7.1 is required\r\n", + "Redoing requirement with just package name...\r\n", + "cuda-python 12.6.0 is installed but cuda-python<12.0a0,>=11.7.1 is required\r\n", + "Redoing requirement with just package name...\r\n", + "nltk 3.2.4 is installed but nltk>=3.8 is required\r\n", + "Redoing requirement with just package name...\r\n", + "The 'libucx>=1.15.0' distribution was not found and is required by the application\r\n", + "Skipping libucx\r\n", + "packaging 21.3 is installed but packaging>=23.1 is required\r\n", + "Redoing requirement with just package name...\r\n", + "scipy 1.14.1 is installed but scipy<1.14,>=1.4.1 is required\r\n", + "Redoing requirement with just package name...\r\n", + "torch 2.4.0 (/opt/conda/lib/python3.10/site-packages)\r\n", + " sympy 1.13.3 (/opt/conda/lib/python3.10/site-packages)\r\n", + " mpmath 1.3.0 (/opt/conda/lib/python3.10/site-packages)\r\n", + "torchvision 0.19.0 (/opt/conda/lib/python3.10/site-packages)\r\n", + " torch 2.4.0 (/opt/conda/lib/python3.10/site-packages)\r\n", + " sympy 1.13.3 (/opt/conda/lib/python3.10/site-packages)\r\n", + " mpmath 1.3.0 (/opt/conda/lib/python3.10/site-packages)\r\n", + "torchaudio 2.4.0 (/opt/conda/lib/python3.10/site-packages)\r\n", + " torch 2.4.0 (/opt/conda/lib/python3.10/site-packages)\r\n", + " sympy 1.13.3 (/opt/conda/lib/python3.10/site-packages)\r\n", + " mpmath 1.3.0 
(/opt/conda/lib/python3.10/site-packages)\r\n", + "Found existing installation: sympy 1.13.3\r\n", + "Uninstalling sympy-1.13.3:\r\n", + " Successfully uninstalled sympy-1.13.3\r\n", + "Found existing installation: torchvision 0.19.0\r\n", + "Uninstalling torchvision-0.19.0:\r\n", + " Successfully uninstalled torchvision-0.19.0\r\n", + "Found existing installation: mpmath 1.3.0\r\n", + "Uninstalling mpmath-1.3.0:\r\n", + " Successfully uninstalled mpmath-1.3.0\r\n", + "Found existing installation: torch 2.4.0\r\n", + "Uninstalling torch-2.4.0:\r\n", + " Successfully uninstalled torch-2.4.0\r\n", + "Found existing installation: torchaudio 2.4.0\r\n", + "Uninstalling torchaudio-2.4.0:\r\n", + " Successfully uninstalled torchaudio-2.4.0\r\n", + "Looking in indexes: https://download.pytorch.org/whl/cu121\r\n", + "Collecting torch\r\n", + " Downloading https://download.pytorch.org/whl/cu121/torch-2.5.1%2Bcu121-cp310-cp310-linux_x86_64.whl (780.4 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m780.4/780.4 MB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting torchvision\r\n", + " Downloading https://download.pytorch.org/whl/cu121/torchvision-0.20.1%2Bcu121-cp310-cp310-linux_x86_64.whl (7.3 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.3/7.3 MB\u001b[0m \u001b[31m3.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting torchaudio\r\n", + " Downloading https://download.pytorch.org/whl/cu121/torchaudio-2.5.1%2Bcu121-cp310-cp310-linux_x86_64.whl (3.4 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.4/3.4 MB\u001b[0m \u001b[31m70.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting xformers\r\n", + " Downloading https://download.pytorch.org/whl/cu121/xformers-0.0.28.post3-cp310-cp310-manylinux_2_28_x86_64.whl (16.7 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m16.7/16.7 MB\u001b[0m \u001b[31m82.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hRequirement already satisfied: filelock in /opt/conda/lib/python3.10/site-packages (from torch) (3.15.1)\r\n", + "Requirement already satisfied: typing-extensions>=4.8.0 in /opt/conda/lib/python3.10/site-packages (from torch) (4.12.2)\r\n", + "Requirement already satisfied: networkx in /opt/conda/lib/python3.10/site-packages (from torch) (3.3)\r\n", + "Requirement already satisfied: jinja2 in /opt/conda/lib/python3.10/site-packages (from torch) (3.1.4)\r\n", + "Requirement already satisfied: fsspec in /opt/conda/lib/python3.10/site-packages (from torch) (2024.6.1)\r\n", + "Collecting nvidia-cuda-nvrtc-cu12==12.1.105 (from torch)\r\n", + " Downloading https://download.pytorch.org/whl/cu121/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (23.7 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m23.7/23.7 MB\u001b[0m \u001b[31m72.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting nvidia-cuda-runtime-cu12==12.1.105 (from torch)\r\n", + " Downloading https://download.pytorch.org/whl/cu121/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (823 kB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m823.6/823.6 kB\u001b[0m \u001b[31m40.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting nvidia-cuda-cupti-cu12==12.1.105 
(from torch)\r\n", + " Downloading https://download.pytorch.org/whl/cu121/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (14.1 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m14.1/14.1 MB\u001b[0m \u001b[31m84.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting nvidia-cudnn-cu12==9.1.0.70 (from torch)\r\n", + " Downloading https://download.pytorch.org/whl/cu121/nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl (664.8 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m664.8/664.8 MB\u001b[0m \u001b[31m2.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting nvidia-cublas-cu12==12.1.3.1 (from torch)\r\n", + " Downloading https://download.pytorch.org/whl/cu121/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl (410.6 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m410.6/410.6 MB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting nvidia-cufft-cu12==11.0.2.54 (from torch)\r\n", + " Downloading https://download.pytorch.org/whl/cu121/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl (121.6 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m121.6/121.6 MB\u001b[0m \u001b[31m13.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting nvidia-curand-cu12==10.3.2.106 (from torch)\r\n", + " Downloading https://download.pytorch.org/whl/cu121/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl (56.5 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m56.5/56.5 MB\u001b[0m \u001b[31m29.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting nvidia-cusolver-cu12==11.4.5.107 (from torch)\r\n", + " Downloading https://download.pytorch.org/whl/cu121/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl (124.2 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m124.2/124.2 MB\u001b[0m \u001b[31m13.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting nvidia-cusparse-cu12==12.1.0.106 (from torch)\r\n", + " Downloading https://download.pytorch.org/whl/cu121/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl (196.0 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m196.0/196.0 MB\u001b[0m \u001b[31m8.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting nvidia-nccl-cu12==2.21.5 (from torch)\r\n", + " Downloading https://download.pytorch.org/whl/nvidia_nccl_cu12-2.21.5-py3-none-manylinux2014_x86_64.whl (188.7 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m188.7/188.7 MB\u001b[0m \u001b[31m8.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting nvidia-nvtx-cu12==12.1.105 (from torch)\r\n", + " Downloading https://download.pytorch.org/whl/cu121/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (99 kB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m99.1/99.1 kB\u001b[0m \u001b[31m4.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting triton==3.1.0 (from torch)\r\n", + " Downloading https://download.pytorch.org/whl/triton-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (209.5 MB)\r\n", + "\u001b[2K 
\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m209.5/209.5 MB\u001b[0m \u001b[31m8.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting sympy==1.13.1 (from torch)\r\n", + " Downloading https://download.pytorch.org/whl/sympy-1.13.1-py3-none-any.whl (6.2 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.2/6.2 MB\u001b[0m \u001b[31m85.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting nvidia-nvjitlink-cu12 (from nvidia-cusolver-cu12==11.4.5.107->torch)\r\n", + " Downloading https://download.pytorch.org/whl/cu121/nvidia_nvjitlink_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (19.8 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m19.8/19.8 MB\u001b[0m \u001b[31m72.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting mpmath<1.4,>=1.1.0 (from sympy==1.13.1->torch)\r\n", + " Downloading https://download.pytorch.org/whl/mpmath-1.3.0-py3-none-any.whl (536 kB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m536.2/536.2 kB\u001b[0m \u001b[31m25.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hRequirement already satisfied: numpy in /opt/conda/lib/python3.10/site-packages (from torchvision) (1.26.4)\r\n", + "Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /opt/conda/lib/python3.10/site-packages (from torchvision) (10.3.0)\r\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /opt/conda/lib/python3.10/site-packages (from jinja2->torch) (2.1.5)\r\n", + "Installing collected packages: mpmath, triton, sympy, nvidia-nvtx-cu12, nvidia-nvjitlink-cu12, nvidia-nccl-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, nvidia-cusparse-cu12, nvidia-cudnn-cu12, nvidia-cusolver-cu12, torch, xformers, torchvision, torchaudio\r\n", + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\r\n", + "fastai 2.7.17 requires torch<2.5,>=1.10, but you have torch 2.5.1+cu121 which is incompatible.\u001b[0m\u001b[31m\r\n", + "\u001b[0mSuccessfully installed mpmath-1.3.0 nvidia-cublas-cu12-12.1.3.1 nvidia-cuda-cupti-cu12-12.1.105 nvidia-cuda-nvrtc-cu12-12.1.105 nvidia-cuda-runtime-cu12-12.1.105 nvidia-cudnn-cu12-9.1.0.70 nvidia-cufft-cu12-11.0.2.54 nvidia-curand-cu12-10.3.2.106 nvidia-cusolver-cu12-11.4.5.107 nvidia-cusparse-cu12-12.1.0.106 nvidia-nccl-cu12-2.21.5 nvidia-nvjitlink-cu12-12.1.105 nvidia-nvtx-cu12-12.1.105 sympy-1.13.1 torch-2.5.1+cu121 torchaudio-2.5.1+cu121 torchvision-0.20.1+cu121 triton-3.1.0 xformers-0.0.28.post3\r\n", + "Collecting unsloth[kaggle-new]\r\n", + " Downloading unsloth-2024.11.7-py3-none-any.whl.metadata (59 kB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m59.7/59.7 kB\u001b[0m \u001b[31m1.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hCollecting unsloth-zoo>=2024.11.1 (from unsloth[kaggle-new])\r\n", + " Downloading unsloth_zoo-2024.11.5-py3-none-any.whl.metadata (16 kB)\r\n", + "Requirement already satisfied: torch>=2.4.0 in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (2.5.1+cu121)\r\n", + "Requirement already satisfied: xformers>=0.0.27.post2 in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (0.0.28.post3)\r\n", + "Collecting bitsandbytes (from unsloth[kaggle-new])\r\n", + " Downloading bitsandbytes-0.44.1-py3-none-manylinux_2_24_x86_64.whl.metadata (3.5 kB)\r\n", + "Requirement already satisfied: triton>=3.0.0 in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (3.1.0)\r\n", + "Requirement already satisfied: packaging in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (21.3)\r\n", + "Collecting tyro (from unsloth[kaggle-new])\r\n", + " Downloading tyro-0.9.1-py3-none-any.whl.metadata (9.3 kB)\r\n", + "Collecting transformers>=4.46.1 (from unsloth[kaggle-new])\r\n", + " Downloading transformers-4.46.3-py3-none-any.whl.metadata (44 kB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m44.1/44.1 kB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hRequirement already satisfied: datasets>=2.16.0 in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (3.0.1)\r\n", + "Requirement already satisfied: sentencepiece>=0.2.0 in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (0.2.0)\r\n", + "Requirement already satisfied: tqdm in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (4.66.4)\r\n", + "Requirement already satisfied: psutil in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (5.9.3)\r\n", + "Requirement already satisfied: wheel>=0.42.0 in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (0.43.0)\r\n", + "Requirement already satisfied: numpy in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (1.26.4)\r\n", + "Requirement already satisfied: accelerate>=0.34.1 in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (0.34.2)\r\n", + "Collecting trl!=0.9.0,!=0.9.1,!=0.9.2,!=0.9.3,>=0.7.9 (from unsloth[kaggle-new])\r\n", + " Downloading trl-0.12.1-py3-none-any.whl.metadata (10 kB)\r\n", + "Collecting peft!=0.11.0,>=0.7.1 (from unsloth[kaggle-new])\r\n", + " Downloading peft-0.13.2-py3-none-any.whl.metadata (13 kB)\r\n", + "Requirement already satisfied: 
protobuf<4.0.0 in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (3.20.3)\r\n", + "Requirement already satisfied: huggingface-hub in /opt/conda/lib/python3.10/site-packages (from unsloth[kaggle-new]) (0.25.1)\r\n", + "Collecting hf-transfer (from unsloth[kaggle-new])\r\n", + " Downloading hf_transfer-0.1.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (1.7 kB)\r\n", + "Requirement already satisfied: pyyaml in /opt/conda/lib/python3.10/site-packages (from accelerate>=0.34.1->unsloth[kaggle-new]) (6.0.2)\r\n", + "Requirement already satisfied: safetensors>=0.4.3 in /opt/conda/lib/python3.10/site-packages (from accelerate>=0.34.1->unsloth[kaggle-new]) (0.4.5)\r\n", + "Requirement already satisfied: filelock in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth[kaggle-new]) (3.15.1)\r\n", + "Requirement already satisfied: pyarrow>=15.0.0 in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth[kaggle-new]) (16.1.0)\r\n", + "Requirement already satisfied: dill<0.3.9,>=0.3.0 in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth[kaggle-new]) (0.3.8)\r\n", + "Requirement already satisfied: pandas in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth[kaggle-new]) (2.2.2)\r\n", + "Requirement already satisfied: requests>=2.32.2 in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth[kaggle-new]) (2.32.3)\r\n", + "Requirement already satisfied: xxhash in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth[kaggle-new]) (3.4.1)\r\n", + "Requirement already satisfied: multiprocess in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth[kaggle-new]) (0.70.16)\r\n", + "Requirement already satisfied: fsspec<=2024.6.1,>=2023.1.0 in /opt/conda/lib/python3.10/site-packages (from fsspec[http]<=2024.6.1,>=2023.1.0->datasets>=2.16.0->unsloth[kaggle-new]) (2024.6.1)\r\n", + "Requirement already satisfied: aiohttp in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth[kaggle-new]) (3.9.5)\r\n", + "Requirement already satisfied: typing-extensions>=3.7.4.3 in /opt/conda/lib/python3.10/site-packages (from huggingface-hub->unsloth[kaggle-new]) (4.12.2)\r\n", + "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /opt/conda/lib/python3.10/site-packages (from packaging->unsloth[kaggle-new]) (3.1.2)\r\n", + "Requirement already satisfied: networkx in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (3.3)\r\n", + "Requirement already satisfied: jinja2 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (3.1.4)\r\n", + "Requirement already satisfied: nvidia-cuda-nvrtc-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (12.1.105)\r\n", + "Requirement already satisfied: nvidia-cuda-runtime-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (12.1.105)\r\n", + "Requirement already satisfied: nvidia-cuda-cupti-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (12.1.105)\r\n", + "Requirement already satisfied: nvidia-cudnn-cu12==9.1.0.70 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (9.1.0.70)\r\n", + "Requirement already satisfied: nvidia-cublas-cu12==12.1.3.1 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (12.1.3.1)\r\n", + "Requirement already 
satisfied: nvidia-cufft-cu12==11.0.2.54 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (11.0.2.54)\r\n", + "Requirement already satisfied: nvidia-curand-cu12==10.3.2.106 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (10.3.2.106)\r\n", + "Requirement already satisfied: nvidia-cusolver-cu12==11.4.5.107 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (11.4.5.107)\r\n", + "Requirement already satisfied: nvidia-cusparse-cu12==12.1.0.106 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (12.1.0.106)\r\n", + "Requirement already satisfied: nvidia-nccl-cu12==2.21.5 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (2.21.5)\r\n", + "Requirement already satisfied: nvidia-nvtx-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (12.1.105)\r\n", + "Requirement already satisfied: sympy==1.13.1 in /opt/conda/lib/python3.10/site-packages (from torch>=2.4.0->unsloth[kaggle-new]) (1.13.1)\r\n", + "Requirement already satisfied: nvidia-nvjitlink-cu12 in /opt/conda/lib/python3.10/site-packages (from nvidia-cusolver-cu12==11.4.5.107->torch>=2.4.0->unsloth[kaggle-new]) (12.1.105)\r\n", + "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/conda/lib/python3.10/site-packages (from sympy==1.13.1->torch>=2.4.0->unsloth[kaggle-new]) (1.3.0)\r\n", + "Requirement already satisfied: regex!=2019.12.17 in /opt/conda/lib/python3.10/site-packages (from transformers>=4.46.1->unsloth[kaggle-new]) (2024.5.15)\r\n", + "Requirement already satisfied: tokenizers<0.21,>=0.20 in /opt/conda/lib/python3.10/site-packages (from transformers>=4.46.1->unsloth[kaggle-new]) (0.20.0)\r\n", + "Requirement already satisfied: rich in /opt/conda/lib/python3.10/site-packages (from trl!=0.9.0,!=0.9.1,!=0.9.2,!=0.9.3,>=0.7.9->unsloth[kaggle-new]) (13.7.1)\r\n", + "Requirement already satisfied: docstring-parser>=0.16 in /opt/conda/lib/python3.10/site-packages (from tyro->unsloth[kaggle-new]) (0.16)\r\n", + "Collecting shtab>=1.5.6 (from tyro->unsloth[kaggle-new])\r\n", + " Downloading shtab-1.7.1-py3-none-any.whl.metadata (7.3 kB)\r\n", + "Requirement already satisfied: aiosignal>=1.1.2 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets>=2.16.0->unsloth[kaggle-new]) (1.3.1)\r\n", + "Requirement already satisfied: attrs>=17.3.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets>=2.16.0->unsloth[kaggle-new]) (23.2.0)\r\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets>=2.16.0->unsloth[kaggle-new]) (1.4.1)\r\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets>=2.16.0->unsloth[kaggle-new]) (6.0.5)\r\n", + "Requirement already satisfied: yarl<2.0,>=1.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets>=2.16.0->unsloth[kaggle-new]) (1.9.4)\r\n", + "Requirement already satisfied: async-timeout<5.0,>=4.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets>=2.16.0->unsloth[kaggle-new]) (4.0.3)\r\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/conda/lib/python3.10/site-packages (from requests>=2.32.2->datasets>=2.16.0->unsloth[kaggle-new]) (3.3.2)\r\n", + "Requirement already satisfied: idna<4,>=2.5 in /opt/conda/lib/python3.10/site-packages (from 
requests>=2.32.2->datasets>=2.16.0->unsloth[kaggle-new]) (3.7)\r\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/conda/lib/python3.10/site-packages (from requests>=2.32.2->datasets>=2.16.0->unsloth[kaggle-new]) (1.26.18)\r\n", + "Requirement already satisfied: certifi>=2017.4.17 in /opt/conda/lib/python3.10/site-packages (from requests>=2.32.2->datasets>=2.16.0->unsloth[kaggle-new]) (2024.8.30)\r\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in /opt/conda/lib/python3.10/site-packages (from rich->trl!=0.9.0,!=0.9.1,!=0.9.2,!=0.9.3,>=0.7.9->unsloth[kaggle-new]) (3.0.0)\r\n", + "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/conda/lib/python3.10/site-packages (from rich->trl!=0.9.0,!=0.9.1,!=0.9.2,!=0.9.3,>=0.7.9->unsloth[kaggle-new]) (2.18.0)\r\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /opt/conda/lib/python3.10/site-packages (from jinja2->torch>=2.4.0->unsloth[kaggle-new]) (2.1.5)\r\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in /opt/conda/lib/python3.10/site-packages (from pandas->datasets>=2.16.0->unsloth[kaggle-new]) (2.9.0.post0)\r\n", + "Requirement already satisfied: pytz>=2020.1 in /opt/conda/lib/python3.10/site-packages (from pandas->datasets>=2.16.0->unsloth[kaggle-new]) (2024.1)\r\n", + "Requirement already satisfied: tzdata>=2022.7 in /opt/conda/lib/python3.10/site-packages (from pandas->datasets>=2.16.0->unsloth[kaggle-new]) (2024.1)\r\n", + "Requirement already satisfied: mdurl~=0.1 in /opt/conda/lib/python3.10/site-packages (from markdown-it-py>=2.2.0->rich->trl!=0.9.0,!=0.9.1,!=0.9.2,!=0.9.3,>=0.7.9->unsloth[kaggle-new]) (0.1.2)\r\n", + "Requirement already satisfied: six>=1.5 in /opt/conda/lib/python3.10/site-packages (from python-dateutil>=2.8.2->pandas->datasets>=2.16.0->unsloth[kaggle-new]) (1.16.0)\r\n", + "Downloading bitsandbytes-0.44.1-py3-none-manylinux_2_24_x86_64.whl (122.4 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m122.4/122.4 MB\u001b[0m \u001b[31m14.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hDownloading peft-0.13.2-py3-none-any.whl (320 kB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m320.7/320.7 kB\u001b[0m \u001b[31m17.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hDownloading transformers-4.46.3-py3-none-any.whl (10.0 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m10.0/10.0 MB\u001b[0m \u001b[31m98.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hDownloading trl-0.12.1-py3-none-any.whl (310 kB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m310.9/310.9 kB\u001b[0m \u001b[31m16.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hDownloading unsloth_zoo-2024.11.5-py3-none-any.whl (31 kB)\r\n", + "Downloading hf_transfer-0.1.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.6 MB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.6/3.6 MB\u001b[0m \u001b[31m82.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hDownloading tyro-0.9.1-py3-none-any.whl (111 kB)\r\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m111.9/111.9 kB\u001b[0m \u001b[31m6.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hDownloading unsloth-2024.11.7-py3-none-any.whl (163 kB)\r\n", + "\u001b[2K 
\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m163.9/163.9 kB\u001b[0m \u001b[31m9.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\r\n", + "\u001b[?25hDownloading shtab-1.7.1-py3-none-any.whl (14 kB)\r\n", + "Installing collected packages: shtab, hf-transfer, tyro, transformers, bitsandbytes, trl, peft, unsloth-zoo, unsloth\r\n", + " Attempting uninstall: transformers\r\n", + " Found existing installation: transformers 4.45.1\r\n", + " Uninstalling transformers-4.45.1:\r\n", + " Successfully uninstalled transformers-4.45.1\r\n", + "Successfully installed bitsandbytes-0.44.1 hf-transfer-0.1.8 peft-0.13.2 shtab-1.7.1 transformers-4.46.3 trl-0.12.1 tyro-0.9.1 unsloth-2024.11.7 unsloth-zoo-2024.11.5\r\n", + "Found existing installation: unsloth 2024.11.7\r\n", + "Uninstalling unsloth-2024.11.7:\r\n", + " Successfully uninstalled unsloth-2024.11.7\r\n", + "Collecting git+https://github.com/unslothai/unsloth.git@a2f8db3e7341f983af5814a2c56f54fa29ee548d\r\n", + " Cloning https://github.com/unslothai/unsloth.git (to revision a2f8db3e7341f983af5814a2c56f54fa29ee548d) to /tmp/pip-req-build-7w3hakz0\r\n", + " Running command git clone --filter=blob:none --quiet https://github.com/unslothai/unsloth.git /tmp/pip-req-build-7w3hakz0\r\n", + " Running command git rev-parse -q --verify 'sha^a2f8db3e7341f983af5814a2c56f54fa29ee548d'\r\n", + " Running command git fetch -q https://github.com/unslothai/unsloth.git a2f8db3e7341f983af5814a2c56f54fa29ee548d\r\n", + " Running command git checkout -q a2f8db3e7341f983af5814a2c56f54fa29ee548d\r\n", + " Resolved https://github.com/unslothai/unsloth.git to commit a2f8db3e7341f983af5814a2c56f54fa29ee548d\r\n", + " Installing build dependencies ... \u001b[?25l-\b \b\\\b \b|\b \b/\b \b-\b \b\\\b \b|\b \bdone\r\n", + "\u001b[?25h Getting requirements to build wheel ... \u001b[?25l-\b \bdone\r\n", + "\u001b[?25h Preparing metadata (pyproject.toml) ... \u001b[?25l-\b \bdone\r\n", + "\u001b[?25hBuilding wheels for collected packages: unsloth\r\n", + " Building wheel for unsloth (pyproject.toml) ... 
\u001b[?25l-\b \b\\\b \bdone\r\n", + "\u001b[?25h Created wheel for unsloth: filename=unsloth-2024.10.7-py3-none-any.whl size=164376 sha256=318d24041afad463f487f3927388d766e913ffa5b694f3e2e3b1a7851fa67a1c\r\n", + " Stored in directory: /root/.cache/pip/wheels/d5/c3/0d/98b9068092121456c620edb0a24e05fda5934229b776b63a7b\r\n", + "Successfully built unsloth\r\n", + "Installing collected packages: unsloth\r\n", + "Successfully installed unsloth-2024.10.7\r\n" + ] + } + ], + "source": [ + "#%%capture\n", + "!pip install pip3-autoremove\n", + "!pip-autoremove torch torchvision torchaudio -y\n", + "!pip install torch torchvision torchaudio xformers --index-url https://download.pytorch.org/whl/cu121\n", + "# https://github.com/unslothai/unsloth/issues/1284\n", + "!pip install unsloth[kaggle-new]\n", + "# Also get the latest nightly Unsloth!\n", + "!pip uninstall unsloth -y && pip install git+https://github.com/unslothai/unsloth.git@a2f8db3e7341f983af5814a2c56f54fa29ee548d" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "6018b225", + "metadata": { + "execution": { + "iopub.execute_input": "2024-11-19T19:06:21.619747Z", + "iopub.status.busy": "2024-11-19T19:06:21.618961Z", + "iopub.status.idle": "2024-11-19T19:06:41.479598Z", + "shell.execute_reply": "2024-11-19T19:06:41.478738Z" + }, + "papermill": { + "duration": 19.925903, + "end_time": "2024-11-19T19:06:41.482153", + "exception": false, + "start_time": "2024-11-19T19:06:21.556250", + "status": "completed" + }, + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Collecting git+https://github.com/unslothai/unsloth-zoo.git\r\n", + " Cloning https://github.com/unslothai/unsloth-zoo.git to /tmp/pip-req-build-0xpxksif\r\n", + " Running command git clone --filter=blob:none --quiet https://github.com/unslothai/unsloth-zoo.git /tmp/pip-req-build-0xpxksif\r\n", + " Resolved https://github.com/unslothai/unsloth-zoo.git to commit f5421838ef8278cf96d0092d8271ecd6d433588c\r\n", + " Installing build dependencies ... \u001b[?25l-\b \b\\\b \b|\b \b/\b \bdone\r\n", + "\u001b[?25h Getting requirements to build wheel ... \u001b[?25l-\b \bdone\r\n", + "\u001b[?25h Preparing metadata (pyproject.toml) ... 
\u001b[?25l-\b \bdone\r\n", + "\u001b[?25hRequirement already satisfied: torch in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (2.5.1+cu121)\r\n", + "Requirement already satisfied: triton in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (3.1.0)\r\n", + "Requirement already satisfied: packaging in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (21.3)\r\n", + "Requirement already satisfied: tyro in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (0.9.1)\r\n", + "Requirement already satisfied: transformers>=4.46.1 in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (4.46.3)\r\n", + "Requirement already satisfied: datasets>=2.16.0 in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (3.0.1)\r\n", + "Requirement already satisfied: sentencepiece>=0.2.0 in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (0.2.0)\r\n", + "Requirement already satisfied: tqdm in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (4.66.4)\r\n", + "Requirement already satisfied: psutil in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (5.9.3)\r\n", + "Requirement already satisfied: wheel>=0.42.0 in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (0.43.0)\r\n", + "Requirement already satisfied: numpy in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (1.26.4)\r\n", + "Requirement already satisfied: accelerate>=0.34.1 in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (0.34.2)\r\n", + "Requirement already satisfied: trl!=0.9.0,!=0.9.1,!=0.9.2,!=0.9.3,>=0.7.9 in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (0.12.1)\r\n", + "Requirement already satisfied: peft!=0.11.0,>=0.7.1 in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (0.13.2)\r\n", + "Requirement already satisfied: protobuf<4.0.0 in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (3.20.3)\r\n", + "Requirement already satisfied: huggingface-hub in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (0.25.1)\r\n", + "Requirement already satisfied: hf-transfer in /opt/conda/lib/python3.10/site-packages (from unsloth_zoo==2024.11.5) (0.1.8)\r\n", + "Requirement already satisfied: pyyaml in /opt/conda/lib/python3.10/site-packages (from accelerate>=0.34.1->unsloth_zoo==2024.11.5) (6.0.2)\r\n", + "Requirement already satisfied: safetensors>=0.4.3 in /opt/conda/lib/python3.10/site-packages (from accelerate>=0.34.1->unsloth_zoo==2024.11.5) (0.4.5)\r\n", + "Requirement already satisfied: filelock in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth_zoo==2024.11.5) (3.15.1)\r\n", + "Requirement already satisfied: pyarrow>=15.0.0 in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth_zoo==2024.11.5) (16.1.0)\r\n", + "Requirement already satisfied: dill<0.3.9,>=0.3.0 in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth_zoo==2024.11.5) (0.3.8)\r\n", + "Requirement already satisfied: pandas in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth_zoo==2024.11.5) (2.2.2)\r\n", + "Requirement already satisfied: requests>=2.32.2 in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth_zoo==2024.11.5) (2.32.3)\r\n", + "Requirement already satisfied: xxhash in /opt/conda/lib/python3.10/site-packages (from 
datasets>=2.16.0->unsloth_zoo==2024.11.5) (3.4.1)\r\n", + "Requirement already satisfied: multiprocess in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth_zoo==2024.11.5) (0.70.16)\r\n", + "Requirement already satisfied: fsspec<=2024.6.1,>=2023.1.0 in /opt/conda/lib/python3.10/site-packages (from fsspec[http]<=2024.6.1,>=2023.1.0->datasets>=2.16.0->unsloth_zoo==2024.11.5) (2024.6.1)\r\n", + "Requirement already satisfied: aiohttp in /opt/conda/lib/python3.10/site-packages (from datasets>=2.16.0->unsloth_zoo==2024.11.5) (3.9.5)\r\n", + "Requirement already satisfied: typing-extensions>=3.7.4.3 in /opt/conda/lib/python3.10/site-packages (from huggingface-hub->unsloth_zoo==2024.11.5) (4.12.2)\r\n", + "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /opt/conda/lib/python3.10/site-packages (from packaging->unsloth_zoo==2024.11.5) (3.1.2)\r\n", + "Requirement already satisfied: networkx in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (3.3)\r\n", + "Requirement already satisfied: jinja2 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (3.1.4)\r\n", + "Requirement already satisfied: nvidia-cuda-nvrtc-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (12.1.105)\r\n", + "Requirement already satisfied: nvidia-cuda-runtime-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (12.1.105)\r\n", + "Requirement already satisfied: nvidia-cuda-cupti-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (12.1.105)\r\n", + "Requirement already satisfied: nvidia-cudnn-cu12==9.1.0.70 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (9.1.0.70)\r\n", + "Requirement already satisfied: nvidia-cublas-cu12==12.1.3.1 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (12.1.3.1)\r\n", + "Requirement already satisfied: nvidia-cufft-cu12==11.0.2.54 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (11.0.2.54)\r\n", + "Requirement already satisfied: nvidia-curand-cu12==10.3.2.106 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (10.3.2.106)\r\n", + "Requirement already satisfied: nvidia-cusolver-cu12==11.4.5.107 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (11.4.5.107)\r\n", + "Requirement already satisfied: nvidia-cusparse-cu12==12.1.0.106 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (12.1.0.106)\r\n", + "Requirement already satisfied: nvidia-nccl-cu12==2.21.5 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (2.21.5)\r\n", + "Requirement already satisfied: nvidia-nvtx-cu12==12.1.105 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (12.1.105)\r\n", + "Requirement already satisfied: sympy==1.13.1 in /opt/conda/lib/python3.10/site-packages (from torch->unsloth_zoo==2024.11.5) (1.13.1)\r\n", + "Requirement already satisfied: nvidia-nvjitlink-cu12 in /opt/conda/lib/python3.10/site-packages (from nvidia-cusolver-cu12==11.4.5.107->torch->unsloth_zoo==2024.11.5) (12.1.105)\r\n", + "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/conda/lib/python3.10/site-packages (from sympy==1.13.1->torch->unsloth_zoo==2024.11.5) (1.3.0)\r\n", + "Requirement already satisfied: regex!=2019.12.17 in /opt/conda/lib/python3.10/site-packages (from 
transformers>=4.46.1->unsloth_zoo==2024.11.5) (2024.5.15)\r\n", + "Requirement already satisfied: tokenizers<0.21,>=0.20 in /opt/conda/lib/python3.10/site-packages (from transformers>=4.46.1->unsloth_zoo==2024.11.5) (0.20.0)\r\n", + "Requirement already satisfied: rich in /opt/conda/lib/python3.10/site-packages (from trl!=0.9.0,!=0.9.1,!=0.9.2,!=0.9.3,>=0.7.9->unsloth_zoo==2024.11.5) (13.7.1)\r\n", + "Requirement already satisfied: docstring-parser>=0.16 in /opt/conda/lib/python3.10/site-packages (from tyro->unsloth_zoo==2024.11.5) (0.16)\r\n", + "Requirement already satisfied: shtab>=1.5.6 in /opt/conda/lib/python3.10/site-packages (from tyro->unsloth_zoo==2024.11.5) (1.7.1)\r\n", + "Requirement already satisfied: aiosignal>=1.1.2 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets>=2.16.0->unsloth_zoo==2024.11.5) (1.3.1)\r\n", + "Requirement already satisfied: attrs>=17.3.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets>=2.16.0->unsloth_zoo==2024.11.5) (23.2.0)\r\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets>=2.16.0->unsloth_zoo==2024.11.5) (1.4.1)\r\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets>=2.16.0->unsloth_zoo==2024.11.5) (6.0.5)\r\n", + "Requirement already satisfied: yarl<2.0,>=1.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets>=2.16.0->unsloth_zoo==2024.11.5) (1.9.4)\r\n", + "Requirement already satisfied: async-timeout<5.0,>=4.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp->datasets>=2.16.0->unsloth_zoo==2024.11.5) (4.0.3)\r\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/conda/lib/python3.10/site-packages (from requests>=2.32.2->datasets>=2.16.0->unsloth_zoo==2024.11.5) (3.3.2)\r\n", + "Requirement already satisfied: idna<4,>=2.5 in /opt/conda/lib/python3.10/site-packages (from requests>=2.32.2->datasets>=2.16.0->unsloth_zoo==2024.11.5) (3.7)\r\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/conda/lib/python3.10/site-packages (from requests>=2.32.2->datasets>=2.16.0->unsloth_zoo==2024.11.5) (1.26.18)\r\n", + "Requirement already satisfied: certifi>=2017.4.17 in /opt/conda/lib/python3.10/site-packages (from requests>=2.32.2->datasets>=2.16.0->unsloth_zoo==2024.11.5) (2024.8.30)\r\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in /opt/conda/lib/python3.10/site-packages (from rich->trl!=0.9.0,!=0.9.1,!=0.9.2,!=0.9.3,>=0.7.9->unsloth_zoo==2024.11.5) (3.0.0)\r\n", + "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/conda/lib/python3.10/site-packages (from rich->trl!=0.9.0,!=0.9.1,!=0.9.2,!=0.9.3,>=0.7.9->unsloth_zoo==2024.11.5) (2.18.0)\r\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /opt/conda/lib/python3.10/site-packages (from jinja2->torch->unsloth_zoo==2024.11.5) (2.1.5)\r\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in /opt/conda/lib/python3.10/site-packages (from pandas->datasets>=2.16.0->unsloth_zoo==2024.11.5) (2.9.0.post0)\r\n", + "Requirement already satisfied: pytz>=2020.1 in /opt/conda/lib/python3.10/site-packages (from pandas->datasets>=2.16.0->unsloth_zoo==2024.11.5) (2024.1)\r\n", + "Requirement already satisfied: tzdata>=2022.7 in /opt/conda/lib/python3.10/site-packages (from pandas->datasets>=2.16.0->unsloth_zoo==2024.11.5) (2024.1)\r\n", + "Requirement already satisfied: mdurl~=0.1 in /opt/conda/lib/python3.10/site-packages (from 
markdown-it-py>=2.2.0->rich->trl!=0.9.0,!=0.9.1,!=0.9.2,!=0.9.3,>=0.7.9->unsloth_zoo==2024.11.5) (0.1.2)\r\n", + "Requirement already satisfied: six>=1.5 in /opt/conda/lib/python3.10/site-packages (from python-dateutil>=2.8.2->pandas->datasets>=2.16.0->unsloth_zoo==2024.11.5) (1.16.0)\r\n" + ] + } + ], + "source": [ + "!pip install git+https://github.com/unslothai/unsloth-zoo.git\n", + "import os\n", + "os.environ[\"UNSLOTH_IS_PRESENT\"] = \"1\"" + ] + }, + { + "cell_type": "markdown", + "id": "6c8091fe", + "metadata": { + "id": "r2v_X2fA0Df5", + "papermill": { + "duration": 0.064606, + "end_time": "2024-11-19T19:06:41.612002", + "exception": false, + "start_time": "2024-11-19T19:06:41.547396", + "status": "completed" + }, + "tags": [] + }, + "source": [ + "* We support Llama, Mistral, CodeLlama, TinyLlama, Vicuna, Open Hermes etc\n", + "* And Yi, Qwen ([llamafied](https://huggingface.co/models?sort=trending&search=qwen+llama)), Deepseek, all Llama, Mistral derived archs.\n", + "* We support 16bit LoRA or 4bit QLoRA. Both 2x faster.\n", + "* `max_seq_length` can be set to anything, since we do automatic RoPE Scaling via [kaiokendev's](https://kaiokendev.github.io/til) method.\n", + "* [**NEW**] With [PR 26037](https://github.com/huggingface/transformers/pull/26037), we support downloading 4bit models **4x faster**! [Our repo](https://huggingface.co/unsloth) has Llama, Mistral 4bit models." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "c7d55dc3", + "metadata": { + "execution": { + "iopub.execute_input": "2024-11-19T19:06:41.737888Z", + "iopub.status.busy": "2024-11-19T19:06:41.737538Z", + "iopub.status.idle": "2024-11-19T19:08:58.672000Z", + "shell.execute_reply": "2024-11-19T19:08:58.671103Z" + }, + "id": "QmUBVEnvCDJv", + "outputId": "5eff0d61-05b4-471c-eea2-c2e84a915109", + "papermill": { + "duration": 136.999725, + "end_time": "2024-11-19T19:08:58.674026", + "exception": false, + "start_time": "2024-11-19T19:06:41.674301", + "status": "completed" + }, + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "🦥 Unsloth: Will patch your computer to enable 2x faster free finetuning.\n", + "🦥 Unsloth: Will patch your computer to enable 2x faster free finetuning.\n", + "==((====))== Unsloth 2024.10.7: Fast Llama patching. Transformers = 4.46.3.\n", + " \\\\ /| GPU: Tesla T4. Max memory: 14.741 GB. Platform = Linux.\n", + "O^O/ \\_/ \\ Pytorch: 2.5.1+cu121. CUDA = 7.5. CUDA Toolkit = 12.1.\n", + "\\ / Bfloat16 = FALSE. FA [Xformers = 0.0.28.post3. 
FA2 = False]\n", + " \"-____-\" Free Apache license: http://github.com/unslothai/unsloth\n", + "Unsloth: Fast downloading is enabled - ignore downloading bars which are red colored!\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "52307514a7d14c388004fc8ae3e7378e", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "model.safetensors.index.json: 0%| | 0.00/23.9k [00:00, ?B/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "97c2f928e86f4374baa0f502ca5707e3", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Downloading shards: 0%| | 0/4 [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "80a88015f8374bbd930529c4b9722389", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "model-00001-of-00004.safetensors: 0%| | 0.00/4.98G [00:00, ?B/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "6d34a8cb6dd44a51a5ede4509989bb91", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "model-00002-of-00004.safetensors: 0%| | 0.00/5.00G [00:00, ?B/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "a953aa1afe6147e4896888080c1373ba", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "model-00003-of-00004.safetensors: 0%| | 0.00/4.92G [00:00, ?B/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "679dfe3d327a41b2b518d55652625780", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "model-00004-of-00004.safetensors: 0%| | 0.00/1.17G [00:00, ?B/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "0b05cd87aeb141dd92f3756b79bf23e8", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Loading checkpoint shards: 0%| | 0/4 [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "5412bb916c244149a7232f7cf8934dce", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "generation_config.json: 0%| | 0.00/194 [00:00, ?B/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "e098d9b8d2124e30bd94fbc6e9161ad2", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "tokenizer_config.json: 0%| | 0.00/50.9k [00:00, ?B/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "8b9186a0443742c2ba93ae286db9885e", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "tokenizer.json: 0%| | 0.00/9.09M [00:00, ?B/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "a81d251b9e494da1b759157f695e8d47", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "special_tokens_map.json: 0%| | 0.00/296 [00:00, ?B/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + 
"name": "stderr", + "output_type": "stream", + "text": [ + "Unsloth: We successfully patched the tokenizer to add a {% if add_generation_prompt %} to the chat_template.\n", + "This is not a bug, but please notify the Unsloth maintainers - thanks!\n", + "mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated does not have a padding token! Will use pad_token = <|finetune_right_pad_id|>.\n" + ] + } + ], + "source": [ + "from unsloth import FastLanguageModel\n", + "import torch\n", + "max_seq_length = 2048 # Choose any! We auto support RoPE Scaling internally!\n", + "dtype = None # None for auto detection. Float16 for Tesla T4, V100, Bfloat16 for Ampere+\n", + "load_in_4bit = True # Use 4bit quantization to reduce memory usage. Can be False.\n", + "\n", + "# 4bit pre quantized models we support for 4x faster downloading + no OOMs.\n", + "fourbit_models = [\n", + " \"unsloth/mistral-7b-bnb-4bit\",\n", + " \"unsloth/mistral-7b-instruct-v0.2-bnb-4bit\",\n", + " \"unsloth/llama-2-7b-bnb-4bit\",\n", + " \"unsloth/llama-2-13b-bnb-4bit\",\n", + " \"unsloth/codellama-34b-bnb-4bit\",\n", + " \"unsloth/tinyllama-bnb-4bit\",\n", + " \"unsloth/llama-3-8b-bnb-4bit\",\n", + " \"unsloth/llama-3-70b-bnb-4bit\",\n", + "] # More models at https://huggingface.co/unsloth\n", + "\n", + "model, tokenizer = FastLanguageModel.from_pretrained(\n", + " model_name = \"mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated\", # Choose ANY! eg teknium/OpenHermes-2.5-Mistral-7B\n", + " max_seq_length = max_seq_length,\n", + " dtype = dtype,\n", + " load_in_4bit = load_in_4bit,\n", + " # token = \"hf_...\", # use one if using gated models like meta-llama/Llama-2-7b-hf\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "2775c72b", + "metadata": { + "id": "SXd9bTZd1aaL", + "papermill": { + "duration": 0.072004, + "end_time": "2024-11-19T19:08:58.812761", + "exception": false, + "start_time": "2024-11-19T19:08:58.740757", + "status": "completed" + }, + "tags": [] + }, + "source": [ + "We now add LoRA adapters so we only need to update 1 to 10% of all parameters!" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "d4d1a72a", + "metadata": { + "execution": { + "iopub.execute_input": "2024-11-19T19:08:58.951114Z", + "iopub.status.busy": "2024-11-19T19:08:58.950567Z", + "iopub.status.idle": "2024-11-19T19:09:04.490905Z", + "shell.execute_reply": "2024-11-19T19:09:04.490238Z" + }, + "id": "6bZsfBuZDeCL", + "outputId": "b630cc80-ff95-45a2-cc0d-38666010d73b", + "papermill": { + "duration": 5.61606, + "end_time": "2024-11-19T19:09:04.492928", + "exception": false, + "start_time": "2024-11-19T19:08:58.876868", + "status": "completed" + }, + "tags": [] + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Unsloth 2024.10.7 patched 32 layers with 32 QKV layers, 32 O layers and 32 MLP layers.\n" + ] + } + ], + "source": [ + "model = FastLanguageModel.get_peft_model(\n", + " model,\n", + " r = 32, # Choose any number > 0 ! 
Suggested 8, 16, 32, 64, 128\n", + "    target_modules = [\"q_proj\", \"k_proj\", \"v_proj\", \"o_proj\",\n", + "                      \"gate_proj\", \"up_proj\", \"down_proj\",],\n", + "    lora_alpha = 16,\n", + "    lora_dropout = 0, # Supports any, but = 0 is optimized\n", + "    bias = \"none\", # Supports any, but = \"none\" is optimized\n", + "    use_gradient_checkpointing = \"unsloth\", # 4x longer contexts auto supported!\n", + "    random_state = 3407,\n", + "    use_rslora = False, # We support rank stabilized LoRA\n", + "    loftq_config = None, # And LoftQ\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "cca764a5", + "metadata": { + "id": "vITh0KVJ10qX", + "papermill": { + "duration": 0.063926, + "end_time": "2024-11-19T19:09:04.622692", + "exception": false, + "start_time": "2024-11-19T19:09:04.558766", + "status": "completed" + }, + "tags": [] + }, + "source": [ + "<a name=\"Data\"></a>\n", + "### Data Prep\n", + "The stock notebook uses the Alpaca dataset from [yahma](https://huggingface.co/datasets/yahma/alpaca-cleaned), a filtered version of the original 52K [Alpaca dataset](https://crfm.stanford.edu/2023/03/13/alpaca.html); here we replace that section with our own data prep, loading two local Kaggle datasets and formatting them with the `llama-3` chat template.\n", + "\n", + "**[NOTE]** To train only on completions (ignoring the user's input), read TRL's docs [here](https://huggingface.co/docs/trl/sft_trainer#train-on-completions-only).\n", + "\n", + "**[NOTE]** Remember to add the **EOS_TOKEN** to the tokenized output! Otherwise you'll get infinite generations!\n", + "\n", + "If you want to use the `ChatML` template for ShareGPT datasets, try our conversational [notebook](https://colab.research.google.com/drive/1Aau3lgPzeZKQ-98h69CCu1UJcvIBLmy2?usp=sharing).\n", + "\n", + "For text completions like novel writing, try this [notebook](https://colab.research.google.com/drive/1ef-tab5bhkvWmBOObepl1WgJvfvSzn5Q?usp=sharing)."
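, + "\n", + "Before building the dataset, it can help to render one toy conversation through the template and eyeball the result. The sketch below is only illustrative: the two-turn `messages` list is hypothetical, and it assumes `tokenizer` has already been wrapped by `get_chat_template` as in the next cell.\n", + "\n", + "```python\n", + "messages = [\n", + "    {\"role\": \"user\", \"content\": \"Hi!\"},\n", + "    {\"role\": \"assistant\", \"content\": \"Hello! How can I help?\"},\n", + "]\n", + "# tokenize = False returns the rendered string rather than token ids, so you\n", + "# can confirm the llama-3 template and the EOS token are applied as expected.\n", + "print(tokenizer.apply_chat_template(messages, tokenize = False, add_generation_prompt = False))\n", + "```"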
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "69a832a3", + "metadata": { + "execution": { + "iopub.execute_input": "2024-11-19T19:09:04.754265Z", + "iopub.status.busy": "2024-11-19T19:09:04.753481Z", + "iopub.status.idle": "2024-11-19T19:09:06.180842Z", + "shell.execute_reply": "2024-11-19T19:09:06.180121Z" + }, + "id": "LjY75GoYUCB8", + "outputId": "9f40f734-788c-4793-c1af-e9d003337612", + "papermill": { + "duration": 1.495636, + "end_time": "2024-11-19T19:09:06.182870", + "exception": false, + "start_time": "2024-11-19T19:09:04.687234", + "status": "completed" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "from datasets import load_dataset\n", + "import json\n", + "from unsloth.chat_templates import get_chat_template\n", + "\n", + "tokenizer = get_chat_template(\n", + "    tokenizer,\n", + "    chat_template = \"llama-3\", # Supports zephyr, chatml, mistral, llama, alpaca, vicuna, vicuna_old, unsloth\n", + "    #mapping = {\"role\" : \"from\", \"content\" : \"value\", \"user\" : \"human\", \"assistant\" : \"gpt\"}, # ShareGPT style\n", + "    map_eos_token = True, # Maps <|im_end|> to </s> instead\n", + ")\n", + "\n", + "def formatting_prompts_func(convos):\n", + "    texts = [tokenizer.apply_chat_template(convo, tokenize = False, add_generation_prompt = False) for convo in convos]\n", + "    return { \"text\" : texts, }\n", + "\n", + "with open(\"/kaggle/input/the-group-chat/output-10k-c-dropout-nonames-replies.json\") as chatfile:\n", + "    convos = [json.loads(j) for j in chatfile.readlines()]\n", + "\n", + "with open(\"/kaggle/input/toxicqa/toxicQAfinal.json\") as chatfile:\n", + "    convos += [json.loads(j) for j in chatfile.readlines()]\n", + "    \n", + "dataset = formatting_prompts_func(convos)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "6b4a347d", + "metadata": { + "execution": { + "iopub.execute_input": "2024-11-19T19:09:06.314334Z", + "iopub.status.busy": "2024-11-19T19:09:06.313377Z", + "iopub.status.idle": "2024-11-19T19:09:06.739552Z", + "shell.execute_reply": "2024-11-19T19:09:06.738597Z" + }, + "papermill": { + "duration": 0.493416, + "end_time": "2024-11-19T19:09:06.741610", + "exception": false, + "start_time": "2024-11-19T19:09:06.248194", + "status": "completed" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "from datasets import Dataset\n", + "dataset = Dataset.from_dict(dataset)" + ] + }, + { + "cell_type": "markdown", + "id": "4c45849c", + "metadata": { + "id": "idAEIeSQ3xdS", + "papermill": { + "duration": 0.064215, + "end_time": "2024-11-19T19:09:06.871810", + "exception": false, + "start_time": "2024-11-19T19:09:06.807595", + "status": "completed" + }, + "tags": [] + }, + "source": [ + "<a name=\"Train\"></a>\n", + "### Train the model\n", + "Now let's use Hugging Face TRL's `SFTTrainer`! More docs here: [TRL SFT docs](https://huggingface.co/docs/trl/sft_trainer). The stock notebook runs only 60 steps to speed things up; here we set `num_train_epochs=1` for a full pass over the data and leave `max_steps` unset. We also support TRL's `DPOTrainer`!"
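, + "\n", + "For reference, the total step count the trainer reports follows directly from the batch settings below. A quick back-of-the-envelope check (a sketch; the numbers are taken from this run's own log):\n", + "\n", + "```python\n", + "import math\n", + "num_examples = 17_983  # combined dataset size reported by the trainer\n", + "effective_batch = 2 * 4  # per_device_train_batch_size * gradient_accumulation_steps\n", + "steps_per_epoch = math.ceil(num_examples / effective_batch)\n", + "print(effective_batch, steps_per_epoch)  # 8 2248 -- matches 'Total steps = 2,248'\n", + "```"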
+ ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "7bbc400a", + "metadata": { + "execution": { + "iopub.execute_input": "2024-11-19T19:09:07.001740Z", + "iopub.status.busy": "2024-11-19T19:09:07.000573Z", + "iopub.status.idle": "2024-11-19T19:09:24.425284Z", + "shell.execute_reply": "2024-11-19T19:09:24.424466Z" + }, + "id": "95_Nn-89DhsL", + "outputId": "4b809e6d-271f-446f-dec8-abe0d13259f8", + "papermill": { + "duration": 17.491445, + "end_time": "2024-11-19T19:09:24.427211", + "exception": false, + "start_time": "2024-11-19T19:09:06.935766", + "status": "completed" + }, + "tags": [] + }, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "0f0c38ccb6c0402f84a66639ce3b0a2c", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Map (num_proc=2): 0%| | 0/17983 [00:00, ? examples/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from trl import SFTTrainer\n", + "from transformers import TrainingArguments\n", + "\n", + "trainer = SFTTrainer(\n", + " model = model,\n", + " tokenizer = tokenizer,\n", + " train_dataset = dataset,\n", + " dataset_text_field = \"text\",\n", + " max_seq_length = max_seq_length,\n", + " dataset_num_proc = 2,\n", + " packing = False, # Can make training 5x faster for short sequences.\n", + " args = TrainingArguments(\n", + " per_device_train_batch_size = 2,\n", + " gradient_accumulation_steps = 4,\n", + " warmup_steps = 5,\n", + " num_train_epochs=1,\n", + " learning_rate = 2e-4,\n", + " fp16 = not torch.cuda.is_bf16_supported(),\n", + " bf16 = torch.cuda.is_bf16_supported(),\n", + " logging_steps = 1,\n", + " optim = \"adamw_8bit\",\n", + " weight_decay = 0.01,\n", + " lr_scheduler_type = \"linear\",\n", + " seed = 3407,\n", + " output_dir = \"outputs\",\n", + " report_to = \"none\",\n", + " ),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "5f90acfb", + "metadata": { + "cellView": "form", + "execution": { + "iopub.execute_input": "2024-11-19T19:09:24.559813Z", + "iopub.status.busy": "2024-11-19T19:09:24.558971Z", + "iopub.status.idle": "2024-11-19T19:09:24.564859Z", + "shell.execute_reply": "2024-11-19T19:09:24.564110Z" + }, + "id": "2ejIt2xSNKKp", + "outputId": "4815a050-0c0f-4a6a-9d93-b01c44eaea35", + "papermill": { + "duration": 0.072966, + "end_time": "2024-11-19T19:09:24.566638", + "exception": false, + "start_time": "2024-11-19T19:09:24.493672", + "status": "completed" + }, + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "GPU = Tesla T4. Max memory = 14.741 GB.\n", + "6.172 GB of memory reserved.\n" + ] + } + ], + "source": [ + "#@title Show current memory stats\n", + "gpu_stats = torch.cuda.get_device_properties(0)\n", + "start_gpu_memory = round(torch.cuda.max_memory_reserved() / 1024 / 1024 / 1024, 3)\n", + "max_memory = round(gpu_stats.total_memory / 1024 / 1024 / 1024, 3)\n", + "print(f\"GPU = {gpu_stats.name}. 
Max memory = {max_memory} GB.\")\n", + "print(f\"{start_gpu_memory} GB of memory reserved.\")" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "1a3a38b4", + "metadata": { + "execution": { + "iopub.execute_input": "2024-11-19T19:09:24.697522Z", + "iopub.status.busy": "2024-11-19T19:09:24.696820Z", + "iopub.status.idle": "2024-11-20T03:54:09.418782Z", + "shell.execute_reply": "2024-11-20T03:54:09.417866Z" + }, + "id": "yqxqAZ7KJ4oL", + "outputId": "3cf26aac-6042-4458-c4a6-d8849efb6a95", + "papermill": { + "duration": 31484.789349, + "end_time": "2024-11-20T03:54:09.420797", + "exception": false, + "start_time": "2024-11-19T19:09:24.631448", + "status": "completed" + }, + "tags": [] + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "==((====))== Unsloth - 2x faster free finetuning | Num GPUs = 1\n", + " \\\\ /| Num examples = 17,983 | Num Epochs = 1\n", + "O^O/ \\_/ \\ Batch size per device = 2 | Gradient Accumulation steps = 4\n", + "\\ / Total batch size = 8 | Total steps = 2,248\n", + " \"-____-\" Number of trainable parameters = 83,886,080\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "Step | \n", + "Training Loss | \n", + "
---|---|
1 | 2.630800 |
… | … |
1639 | 1.325700 |
[Per-step training-loss log condensed: loss starts around 2.6 and settles into a noisy 1.2–1.7 band (with occasional spikes above 2.0) through step 1639.]
1640 | \n", + "1.669100 | \n", + "
1641 | \n", + "1.196700 | \n", + "
1642 | \n", + "1.799600 | \n", + "
1643 | \n", + "2.356400 | \n", + "
1644 | \n", + "1.440900 | \n", + "
1645 | \n", + "1.170000 | \n", + "
1646 | \n", + "1.751900 | \n", + "
1647 | \n", + "1.661000 | \n", + "
1648 | \n", + "1.412100 | \n", + "
1649 | \n", + "1.389200 | \n", + "
1650 | \n", + "1.585800 | \n", + "
1651 | \n", + "1.676900 | \n", + "
1652 | \n", + "1.647500 | \n", + "
1653 | \n", + "1.095800 | \n", + "
1654 | \n", + "1.028700 | \n", + "
1655 | \n", + "1.265500 | \n", + "
1656 | \n", + "1.192700 | \n", + "
1657 | \n", + "1.682300 | \n", + "
1658 | \n", + "1.137500 | \n", + "
1659 | \n", + "1.226300 | \n", + "
1660 | \n", + "1.419300 | \n", + "
1661 | \n", + "1.490500 | \n", + "
1662 | \n", + "1.404000 | \n", + "
1663 | \n", + "1.138800 | \n", + "
1664 | \n", + "1.637600 | \n", + "
1665 | \n", + "1.024700 | \n", + "
1666 | \n", + "1.229500 | \n", + "
1667 | \n", + "1.366200 | \n", + "
1668 | \n", + "1.519400 | \n", + "
1669 | \n", + "1.155800 | \n", + "
1670 | \n", + "1.503000 | \n", + "
1671 | \n", + "1.375900 | \n", + "
1672 | \n", + "1.220400 | \n", + "
1673 | \n", + "2.008600 | \n", + "
1674 | \n", + "1.705800 | \n", + "
1675 | \n", + "1.622200 | \n", + "
1676 | \n", + "1.551000 | \n", + "
1677 | \n", + "1.181000 | \n", + "
1678 | \n", + "2.058300 | \n", + "
1679 | \n", + "1.616300 | \n", + "
1680 | \n", + "1.422900 | \n", + "
1681 | \n", + "0.961000 | \n", + "
1682 | \n", + "1.238500 | \n", + "
1683 | \n", + "1.534600 | \n", + "
1684 | \n", + "1.718300 | \n", + "
1685 | \n", + "1.256400 | \n", + "
1686 | \n", + "1.467500 | \n", + "
1687 | \n", + "1.802200 | \n", + "
1688 | \n", + "1.959200 | \n", + "
1689 | \n", + "1.751000 | \n", + "
1690 | \n", + "1.609300 | \n", + "
1691 | \n", + "1.105800 | \n", + "
1692 | \n", + "1.000300 | \n", + "
1693 | \n", + "2.068200 | \n", + "
1694 | \n", + "1.725000 | \n", + "
1695 | \n", + "1.488500 | \n", + "
1696 | \n", + "1.433400 | \n", + "
1697 | \n", + "1.736800 | \n", + "
1698 | \n", + "1.422700 | \n", + "
1699 | \n", + "1.147900 | \n", + "
1700 | \n", + "1.804000 | \n", + "
1701 | \n", + "2.336700 | \n", + "
1702 | \n", + "1.770800 | \n", + "
1703 | \n", + "1.413700 | \n", + "
1704 | \n", + "1.201600 | \n", + "
1705 | \n", + "1.279500 | \n", + "
1706 | \n", + "1.805600 | \n", + "
1707 | \n", + "1.776300 | \n", + "
1708 | \n", + "1.390500 | \n", + "
1709 | \n", + "1.560100 | \n", + "
1710 | \n", + "1.389400 | \n", + "
1711 | \n", + "1.311000 | \n", + "
1712 | \n", + "1.451800 | \n", + "
1713 | \n", + "1.491600 | \n", + "
1714 | \n", + "1.891500 | \n", + "
1715 | \n", + "1.476800 | \n", + "
1716 | \n", + "1.431300 | \n", + "
1717 | \n", + "1.287700 | \n", + "
1718 | \n", + "1.384600 | \n", + "
1719 | \n", + "1.401400 | \n", + "
1720 | \n", + "1.637300 | \n", + "
1721 | \n", + "1.033600 | \n", + "
1722 | \n", + "1.715000 | \n", + "
1723 | \n", + "1.154200 | \n", + "
1724 | \n", + "1.557200 | \n", + "
1725 | \n", + "1.558400 | \n", + "
1726 | \n", + "1.122800 | \n", + "
1727 | \n", + "1.365000 | \n", + "
1728 | \n", + "1.269300 | \n", + "
1729 | \n", + "1.484500 | \n", + "
1730 | \n", + "1.556000 | \n", + "
1731 | \n", + "1.230000 | \n", + "
1732 | \n", + "1.976800 | \n", + "
1733 | \n", + "1.576700 | \n", + "
1734 | \n", + "1.796700 | \n", + "
1735 | \n", + "1.328300 | \n", + "
1736 | \n", + "1.240400 | \n", + "
1737 | \n", + "1.299600 | \n", + "
1738 | \n", + "1.243100 | \n", + "
1739 | \n", + "1.652900 | \n", + "
1740 | \n", + "1.394200 | \n", + "
1741 | \n", + "2.429400 | \n", + "
1742 | \n", + "1.249000 | \n", + "
1743 | \n", + "1.087400 | \n", + "
1744 | \n", + "1.984900 | \n", + "
1745 | \n", + "1.716300 | \n", + "
1746 | \n", + "1.388500 | \n", + "
1747 | \n", + "1.552100 | \n", + "
1748 | \n", + "1.265400 | \n", + "
1749 | \n", + "1.290600 | \n", + "
1750 | \n", + "1.256300 | \n", + "
1751 | \n", + "1.636700 | \n", + "
1752 | \n", + "1.518100 | \n", + "
1753 | \n", + "1.470100 | \n", + "
1754 | \n", + "1.171900 | \n", + "
1755 | \n", + "1.188500 | \n", + "
1756 | \n", + "1.068700 | \n", + "
1757 | \n", + "1.221800 | \n", + "
1758 | \n", + "1.329400 | \n", + "
1759 | \n", + "1.368200 | \n", + "
1760 | \n", + "1.488300 | \n", + "
1761 | \n", + "1.155600 | \n", + "
1762 | \n", + "1.554500 | \n", + "
1763 | \n", + "1.608900 | \n", + "
1764 | \n", + "1.308300 | \n", + "
1765 | \n", + "1.215500 | \n", + "
1766 | \n", + "1.417500 | \n", + "
1767 | \n", + "1.134500 | \n", + "
1768 | \n", + "1.357100 | \n", + "
1769 | \n", + "1.532100 | \n", + "
1770 | \n", + "1.204100 | \n", + "
1771 | \n", + "1.691600 | \n", + "
1772 | \n", + "1.774600 | \n", + "
1773 | \n", + "0.943600 | \n", + "
1774 | \n", + "1.458000 | \n", + "
1775 | \n", + "1.329100 | \n", + "
1776 | \n", + "1.531200 | \n", + "
1777 | \n", + "1.644400 | \n", + "
1778 | \n", + "1.598000 | \n", + "
1779 | \n", + "1.380400 | \n", + "
1780 | \n", + "1.974700 | \n", + "
1781 | \n", + "1.094100 | \n", + "
1782 | \n", + "1.476000 | \n", + "
1783 | \n", + "1.434500 | \n", + "
1784 | \n", + "1.174300 | \n", + "
1785 | \n", + "1.293600 | \n", + "
1786 | \n", + "1.651100 | \n", + "
1787 | \n", + "1.706500 | \n", + "
1788 | \n", + "1.309400 | \n", + "
1789 | \n", + "1.055200 | \n", + "
1790 | \n", + "1.560100 | \n", + "
1791 | \n", + "1.621100 | \n", + "
1792 | \n", + "1.362200 | \n", + "
1793 | \n", + "1.581300 | \n", + "
1794 | \n", + "1.439300 | \n", + "
1795 | \n", + "1.299800 | \n", + "
1796 | \n", + "1.108900 | \n", + "
1797 | \n", + "1.234900 | \n", + "
1798 | \n", + "1.420900 | \n", + "
1799 | \n", + "1.247500 | \n", + "
1800 | \n", + "1.209700 | \n", + "
1801 | \n", + "1.833500 | \n", + "
1802 | \n", + "1.369300 | \n", + "
1803 | \n", + "1.236900 | \n", + "
1804 | \n", + "1.576300 | \n", + "
1805 | \n", + "1.491300 | \n", + "
1806 | \n", + "1.096700 | \n", + "
1807 | \n", + "1.299100 | \n", + "
1808 | \n", + "1.450900 | \n", + "
1809 | \n", + "1.293600 | \n", + "
1810 | \n", + "1.529600 | \n", + "
1811 | \n", + "1.606500 | \n", + "
1812 | \n", + "1.229800 | \n", + "
1813 | \n", + "1.729600 | \n", + "
1814 | \n", + "2.069400 | \n", + "
1815 | \n", + "1.329100 | \n", + "
1816 | \n", + "1.600400 | \n", + "
1817 | \n", + "1.749900 | \n", + "
1818 | \n", + "1.199500 | \n", + "
1819 | \n", + "1.189900 | \n", + "
1820 | \n", + "1.206800 | \n", + "
1821 | \n", + "2.264400 | \n", + "
1822 | \n", + "1.283800 | \n", + "
1823 | \n", + "1.405200 | \n", + "
1824 | \n", + "1.227800 | \n", + "
1825 | \n", + "1.621800 | \n", + "
1826 | \n", + "1.393800 | \n", + "
1827 | \n", + "1.234300 | \n", + "
1828 | \n", + "1.360500 | \n", + "
1829 | \n", + "1.422900 | \n", + "
1830 | \n", + "1.388800 | \n", + "
1831 | \n", + "1.206300 | \n", + "
1832 | \n", + "1.281400 | \n", + "
1833 | \n", + "1.219400 | \n", + "
1834 | \n", + "1.233900 | \n", + "
1835 | \n", + "1.692200 | \n", + "
1836 | \n", + "1.649800 | \n", + "
1837 | \n", + "1.328300 | \n", + "
1838 | \n", + "1.920600 | \n", + "
1839 | \n", + "1.649000 | \n", + "
1840 | \n", + "1.306800 | \n", + "
1841 | \n", + "1.040500 | \n", + "
1842 | \n", + "1.506200 | \n", + "
1843 | \n", + "1.162700 | \n", + "
1844 | \n", + "1.144300 | \n", + "
1845 | \n", + "1.752300 | \n", + "
1846 | \n", + "1.480600 | \n", + "
1847 | \n", + "1.344200 | \n", + "
1848 | \n", + "1.239000 | \n", + "
1849 | \n", + "1.035800 | \n", + "
1850 | \n", + "1.217000 | \n", + "
1851 | \n", + "1.141900 | \n", + "
1852 | \n", + "1.149500 | \n", + "
1853 | \n", + "1.251000 | \n", + "
1854 | \n", + "1.430700 | \n", + "
1855 | \n", + "1.378100 | \n", + "
1856 | \n", + "1.654700 | \n", + "
1857 | \n", + "1.147900 | \n", + "
1858 | \n", + "1.401800 | \n", + "
1859 | \n", + "1.811800 | \n", + "
1860 | \n", + "1.690600 | \n", + "
1861 | \n", + "1.007700 | \n", + "
1862 | \n", + "1.311000 | \n", + "
1863 | \n", + "1.186500 | \n", + "
1864 | \n", + "1.114800 | \n", + "
1865 | \n", + "1.577400 | \n", + "
1866 | \n", + "1.390000 | \n", + "
1867 | \n", + "1.382800 | \n", + "
1868 | \n", + "1.575000 | \n", + "
1869 | \n", + "1.406900 | \n", + "
1870 | \n", + "1.411900 | \n", + "
1871 | \n", + "1.071300 | \n", + "
1872 | \n", + "1.575200 | \n", + "
1873 | \n", + "1.449300 | \n", + "
1874 | \n", + "1.752000 | \n", + "
1875 | \n", + "1.119500 | \n", + "
1876 | \n", + "1.629200 | \n", + "
1877 | \n", + "1.250900 | \n", + "
1878 | \n", + "1.278500 | \n", + "
1879 | \n", + "1.146100 | \n", + "
1880 | \n", + "1.473300 | \n", + "
1881 | \n", + "1.767300 | \n", + "
1882 | \n", + "2.117000 | \n", + "
1883 | \n", + "1.203400 | \n", + "
1884 | \n", + "1.110900 | \n", + "
1885 | \n", + "1.209700 | \n", + "
1886 | \n", + "1.846700 | \n", + "
1887 | \n", + "1.157100 | \n", + "
1888 | \n", + "1.283200 | \n", + "
1889 | \n", + "1.315900 | \n", + "
1890 | \n", + "1.324700 | \n", + "
1891 | \n", + "1.127500 | \n", + "
1892 | \n", + "1.395200 | \n", + "
1893 | \n", + "1.597100 | \n", + "
1894 | \n", + "1.311900 | \n", + "
1895 | \n", + "1.535100 | \n", + "
1896 | \n", + "1.238000 | \n", + "
1897 | \n", + "1.085500 | \n", + "
1898 | \n", + "2.029100 | \n", + "
1899 | \n", + "1.333500 | \n", + "
1900 | \n", + "2.012700 | \n", + "
1901 | \n", + "1.641400 | \n", + "
1902 | \n", + "1.488000 | \n", + "
1903 | \n", + "1.340500 | \n", + "
1904 | \n", + "1.455900 | \n", + "
1905 | \n", + "1.677300 | \n", + "
1906 | \n", + "1.308700 | \n", + "
1907 | \n", + "1.223900 | \n", + "
1908 | \n", + "1.346900 | \n", + "
1909 | \n", + "1.164800 | \n", + "
1910 | \n", + "1.174300 | \n", + "
1911 | \n", + "1.026200 | \n", + "
1912 | \n", + "1.380600 | \n", + "
1913 | \n", + "1.522100 | \n", + "
1914 | \n", + "1.313400 | \n", + "
1915 | \n", + "1.511100 | \n", + "
1916 | \n", + "1.089300 | \n", + "
1917 | \n", + "1.535000 | \n", + "
1918 | \n", + "1.491000 | \n", + "
1919 | \n", + "2.140200 | \n", + "
1920 | \n", + "1.641000 | \n", + "
1921 | \n", + "1.373200 | \n", + "
1922 | \n", + "1.744200 | \n", + "
1923 | \n", + "1.527400 | \n", + "
1924 | \n", + "1.944600 | \n", + "
1925 | \n", + "1.717700 | \n", + "
1926 | \n", + "1.371700 | \n", + "
1927 | \n", + "1.276700 | \n", + "
1928 | \n", + "1.350800 | \n", + "
1929 | \n", + "1.415100 | \n", + "
1930 | \n", + "1.429200 | \n", + "
1931 | \n", + "1.726000 | \n", + "
1932 | \n", + "1.432200 | \n", + "
1933 | \n", + "1.130500 | \n", + "
1934 | \n", + "1.152500 | \n", + "
1935 | \n", + "1.406900 | \n", + "
1936 | \n", + "0.945800 | \n", + "
1937 | \n", + "2.123700 | \n", + "
1938 | \n", + "1.462600 | \n", + "
1939 | \n", + "1.302800 | \n", + "
1940 | \n", + "1.542700 | \n", + "
1941 | \n", + "1.646700 | \n", + "
1942 | \n", + "1.091100 | \n", + "
1943 | \n", + "1.525800 | \n", + "
1944 | \n", + "1.805100 | \n", + "
1945 | \n", + "1.385600 | \n", + "
1946 | \n", + "1.384300 | \n", + "
1947 | \n", + "1.424400 | \n", + "
1948 | \n", + "1.356500 | \n", + "
1949 | \n", + "1.430500 | \n", + "
1950 | \n", + "1.129100 | \n", + "
1951 | \n", + "1.396000 | \n", + "
1952 | \n", + "1.267200 | \n", + "
1953 | \n", + "1.109400 | \n", + "
1954 | \n", + "1.476600 | \n", + "
1955 | \n", + "1.661100 | \n", + "
1956 | \n", + "1.362800 | \n", + "
1957 | \n", + "1.185100 | \n", + "
1958 | \n", + "1.316000 | \n", + "
1959 | \n", + "1.235400 | \n", + "
1960 | \n", + "1.674900 | \n", + "
1961 | \n", + "1.447400 | \n", + "
1962 | \n", + "1.646300 | \n", + "
1963 | \n", + "1.040400 | \n", + "
1964 | \n", + "1.741700 | \n", + "
1965 | \n", + "1.412700 | \n", + "
1966 | \n", + "1.575200 | \n", + "
1967 | \n", + "1.043200 | \n", + "
1968 | \n", + "1.716600 | \n", + "
1969 | \n", + "1.285700 | \n", + "
1970 | \n", + "1.453900 | \n", + "
1971 | \n", + "1.383000 | \n", + "
1972 | \n", + "1.758500 | \n", + "
1973 | \n", + "1.173800 | \n", + "
1974 | \n", + "1.188800 | \n", + "
1975 | \n", + "1.487500 | \n", + "
1976 | \n", + "1.367200 | \n", + "
1977 | \n", + "1.105000 | \n", + "
1978 | \n", + "1.591300 | \n", + "
1979 | \n", + "1.161100 | \n", + "
1980 | \n", + "1.501300 | \n", + "
1981 | \n", + "1.301500 | \n", + "
1982 | \n", + "1.481200 | \n", + "
1983 | \n", + "1.153500 | \n", + "
1984 | \n", + "1.289400 | \n", + "
1985 | \n", + "1.539300 | \n", + "
1986 | \n", + "1.703700 | \n", + "
1987 | \n", + "1.267300 | \n", + "
1988 | \n", + "1.294200 | \n", + "
1989 | \n", + "1.357100 | \n", + "
1990 | \n", + "1.253700 | \n", + "
1991 | \n", + "1.334600 | \n", + "
1992 | \n", + "1.718800 | \n", + "
1993 | \n", + "1.563400 | \n", + "
1994 | \n", + "1.647900 | \n", + "
1995 | \n", + "1.547600 | \n", + "
1996 | \n", + "1.389200 | \n", + "
1997 | \n", + "1.322900 | \n", + "
1998 | \n", + "1.340500 | \n", + "
1999 | \n", + "1.504700 | \n", + "
2000 | \n", + "1.334000 | \n", + "
2001 | \n", + "1.203100 | \n", + "
2002 | \n", + "1.322800 | \n", + "
2003 | \n", + "1.123500 | \n", + "
2004 | \n", + "1.375200 | \n", + "
2005 | \n", + "1.306000 | \n", + "
2006 | \n", + "1.186800 | \n", + "
2007 | \n", + "1.512000 | \n", + "
2008 | \n", + "1.284300 | \n", + "
2009 | \n", + "1.442800 | \n", + "
2010 | \n", + "1.155800 | \n", + "
2011 | \n", + "1.905600 | \n", + "
2012 | \n", + "1.182600 | \n", + "
2013 | \n", + "1.731600 | \n", + "
2014 | \n", + "1.117500 | \n", + "
2015 | \n", + "1.741300 | \n", + "
2016 | \n", + "1.252900 | \n", + "
2017 | \n", + "1.029700 | \n", + "
2018 | \n", + "1.505600 | \n", + "
2019 | \n", + "1.401000 | \n", + "
2020 | \n", + "1.187700 | \n", + "
2021 | \n", + "1.833800 | \n", + "
2022 | \n", + "1.286800 | \n", + "
2023 | \n", + "1.372400 | \n", + "
2024 | \n", + "1.391300 | \n", + "
2025 | \n", + "1.304800 | \n", + "
2026 | \n", + "1.163900 | \n", + "
2027 | \n", + "1.471400 | \n", + "
2028 | \n", + "1.281000 | \n", + "
2029 | \n", + "1.183200 | \n", + "
2030 | \n", + "1.678900 | \n", + "
2031 | \n", + "1.595700 | \n", + "
2032 | \n", + "1.195000 | \n", + "
2033 | \n", + "1.263200 | \n", + "
2034 | \n", + "1.158200 | \n", + "
2035 | \n", + "1.103000 | \n", + "
2036 | \n", + "1.349300 | \n", + "
2037 | \n", + "1.183100 | \n", + "
2038 | \n", + "1.350600 | \n", + "
2039 | \n", + "1.523100 | \n", + "
2040 | \n", + "1.237700 | \n", + "
2041 | \n", + "1.607700 | \n", + "
2042 | \n", + "1.245600 | \n", + "
2043 | \n", + "1.104900 | \n", + "
2044 | \n", + "1.557800 | \n", + "
2045 | \n", + "1.367800 | \n", + "
2046 | \n", + "1.236800 | \n", + "
2047 | \n", + "1.188600 | \n", + "
2048 | \n", + "1.180500 | \n", + "
2049 | \n", + "1.279400 | \n", + "
2050 | \n", + "1.853500 | \n", + "
2051 | \n", + "1.236400 | \n", + "
2052 | \n", + "1.266600 | \n", + "
2053 | \n", + "1.298100 | \n", + "
2054 | \n", + "1.339700 | \n", + "
2055 | \n", + "1.247300 | \n", + "
2056 | \n", + "1.892200 | \n", + "
2057 | \n", + "1.289800 | \n", + "
2058 | \n", + "1.443800 | \n", + "
2059 | \n", + "1.269000 | \n", + "
2060 | \n", + "1.321000 | \n", + "
2061 | \n", + "1.594500 | \n", + "
2062 | \n", + "1.992100 | \n", + "
2063 | \n", + "1.409600 | \n", + "
2064 | \n", + "1.185900 | \n", + "
2065 | \n", + "1.257600 | \n", + "
2066 | \n", + "1.630700 | \n", + "
2067 | \n", + "1.443100 | \n", + "
2068 | \n", + "1.848100 | \n", + "
2069 | \n", + "1.965000 | \n", + "
2070 | \n", + "1.972600 | \n", + "
2071 | \n", + "1.723600 | \n", + "
2072 | \n", + "1.100800 | \n", + "
2073 | \n", + "1.829900 | \n", + "
2074 | \n", + "1.374600 | \n", + "
2075 | \n", + "1.558600 | \n", + "
2076 | \n", + "1.320900 | \n", + "
2077 | \n", + "1.538300 | \n", + "
2078 | \n", + "1.125100 | \n", + "
2079 | \n", + "1.539000 | \n", + "
2080 | \n", + "1.351400 | \n", + "
2081 | \n", + "1.666900 | \n", + "
2082 | \n", + "1.358900 | \n", + "
2083 | \n", + "1.170800 | \n", + "
2084 | \n", + "1.263400 | \n", + "
2085 | \n", + "1.038400 | \n", + "
2086 | \n", + "1.350100 | \n", + "
2087 | \n", + "1.527600 | \n", + "
2088 | \n", + "1.416600 | \n", + "
2089 | \n", + "1.632500 | \n", + "
2090 | \n", + "1.022900 | \n", + "
2091 | \n", + "1.270300 | \n", + "
2092 | \n", + "1.265800 | \n", + "
2093 | \n", + "1.895400 | \n", + "
2094 | \n", + "1.294000 | \n", + "
2095 | \n", + "1.276000 | \n", + "
2096 | \n", + "1.436200 | \n", + "
2097 | \n", + "1.248000 | \n", + "
2098 | \n", + "1.505700 | \n", + "
2099 | \n", + "1.201300 | \n", + "
2100 | \n", + "1.612800 | \n", + "
2101 | \n", + "1.577500 | \n", + "
2102 | \n", + "2.045800 | \n", + "
2103 | \n", + "1.448800 | \n", + "
2104 | \n", + "1.463300 | \n", + "
2105 | \n", + "1.385300 | \n", + "
2106 | \n", + "1.318200 | \n", + "
2107 | \n", + "1.241900 | \n", + "
2108 | \n", + "2.427100 | \n", + "
2109 | \n", + "1.897000 | \n", + "
2110 | \n", + "2.441200 | \n", + "
2111 | \n", + "1.286000 | \n", + "
2112 | \n", + "1.421300 | \n", + "
2113 | \n", + "1.428900 | \n", + "
2114 | \n", + "1.471300 | \n", + "
2115 | \n", + "1.356700 | \n", + "
2116 | \n", + "1.223000 | \n", + "
2117 | \n", + "1.253100 | \n", + "
2118 | \n", + "1.542300 | \n", + "
2119 | \n", + "1.530200 | \n", + "
2120 | \n", + "1.381900 | \n", + "
2121 | \n", + "1.474300 | \n", + "
2122 | \n", + "1.542500 | \n", + "
2123 | \n", + "1.249200 | \n", + "
2124 | \n", + "1.272600 | \n", + "
2125 | \n", + "1.536700 | \n", + "
2126 | \n", + "1.666900 | \n", + "
2127 | \n", + "1.646300 | \n", + "
2128 | \n", + "1.243100 | \n", + "
2129 | \n", + "1.347400 | \n", + "
2130 | \n", + "1.240400 | \n", + "
2131 | \n", + "1.707300 | \n", + "
2132 | \n", + "1.480700 | \n", + "
2133 | \n", + "1.199700 | \n", + "
2134 | \n", + "1.202100 | \n", + "
2135 | \n", + "1.802800 | \n", + "
2136 | \n", + "1.467500 | \n", + "
2137 | \n", + "1.199000 | \n", + "
2138 | \n", + "1.374700 | \n", + "
2139 | \n", + "1.688600 | \n", + "
2140 | \n", + "1.698300 | \n", + "
2141 | \n", + "1.324000 | \n", + "
2142 | \n", + "1.414500 | \n", + "
2143 | \n", + "1.875900 | \n", + "
2144 | \n", + "1.325200 | \n", + "
2145 | \n", + "1.566500 | \n", + "
2146 | \n", + "1.250600 | \n", + "
2147 | \n", + "1.428000 | \n", + "
2148 | \n", + "1.498400 | \n", + "
2149 | \n", + "1.564300 | \n", + "
2150 | \n", + "1.161100 | \n", + "
2151 | \n", + "1.302200 | \n", + "
2152 | \n", + "2.096400 | \n", + "
2153 | \n", + "2.035500 | \n", + "
2154 | \n", + "1.613100 | \n", + "
2155 | \n", + "1.231100 | \n", + "
2156 | \n", + "1.586100 | \n", + "
2157 | \n", + "1.632300 | \n", + "
2158 | \n", + "1.241100 | \n", + "
2159 | \n", + "1.634800 | \n", + "
2160 | \n", + "1.406300 | \n", + "
2161 | \n", + "1.202800 | \n", + "
2162 | \n", + "1.786200 | \n", + "
2163 | \n", + "1.317200 | \n", + "
2164 | \n", + "1.662700 | \n", + "
2165 | \n", + "1.107200 | \n", + "
2166 | \n", + "1.316000 | \n", + "
2167 | \n", + "1.307700 | \n", + "
2168 | \n", + "1.530900 | \n", + "
2169 | \n", + "1.149300 | \n", + "
2170 | \n", + "1.932500 | \n", + "
2171 | \n", + "1.565200 | \n", + "
2172 | \n", + "1.171800 | \n", + "
2173 | \n", + "1.433600 | \n", + "
2174 | \n", + "1.202100 | \n", + "
2175 | \n", + "1.938400 | \n", + "
2176 | \n", + "1.752000 | \n", + "
2177 | \n", + "1.347400 | \n", + "
2178 | \n", + "1.149800 | \n", + "
2179 | \n", + "1.058000 | \n", + "
2180 | \n", + "1.166900 | \n", + "
2181 | \n", + "1.536500 | \n", + "
2182 | \n", + "1.125400 | \n", + "
2183 | \n", + "1.385100 | \n", + "
2184 | \n", + "1.353000 | \n", + "
2185 | \n", + "1.516800 | \n", + "
2186 | \n", + "1.530400 | \n", + "
2187 | \n", + "1.435800 | \n", + "
2188 | \n", + "1.716300 | \n", + "
2189 | \n", + "1.272100 | \n", + "
2190 | \n", + "2.123100 | \n", + "
2191 | \n", + "1.586500 | \n", + "
2192 | \n", + "1.136500 | \n", + "
2193 | \n", + "1.392300 | \n", + "
2194 | \n", + "1.025900 | \n", + "
2195 | \n", + "1.360300 | \n", + "
2196 | \n", + "1.496100 | \n", + "
2197 | \n", + "2.067000 | \n", + "
2198 | \n", + "1.226700 | \n", + "
2199 | \n", + "1.702900 | \n", + "
2200 | \n", + "1.249700 | \n", + "
2201 | \n", + "1.100700 | \n", + "
2202 | \n", + "0.975700 | \n", + "
2203 | \n", + "1.589000 | \n", + "
2204 | \n", + "1.240000 | \n", + "
2205 | \n", + "1.398200 | \n", + "
2206 | \n", + "1.490700 | \n", + "
2207 | \n", + "1.447900 | \n", + "
2208 | \n", + "1.478700 | \n", + "
2209 | \n", + "1.427600 | \n", + "
2210 | \n", + "1.725500 | \n", + "
2211 | \n", + "1.476800 | \n", + "
2212 | \n", + "1.958500 | \n", + "
2213 | \n", + "1.426400 | \n", + "
2214 | \n", + "1.639300 | \n", + "
2215 | \n", + "1.646200 | \n", + "
2216 | \n", + "1.823300 | \n", + "
2217 | \n", + "1.333400 | \n", + "
2218 | \n", + "1.142500 | \n", + "
2219 | \n", + "1.508600 | \n", + "
2220 | \n", + "2.200100 | \n", + "
2221 | \n", + "1.579700 | \n", + "
2222 | \n", + "1.151400 | \n", + "
2223 | \n", + "1.449600 | \n", + "
2224 | \n", + "1.169100 | \n", + "
2225 | \n", + "1.495000 | \n", + "
2226 | \n", + "1.555500 | \n", + "
2227 | \n", + "1.301300 | \n", + "
2228 | \n", + "1.158000 | \n", + "
2229 | \n", + "1.273100 | \n", + "
2230 | \n", + "1.725400 | \n", + "
2231 | \n", + "1.451500 | \n", + "
2232 | \n", + "1.227900 | \n", + "
2233 | \n", + "1.666000 | \n", + "
2234 | \n", + "1.284600 | \n", + "
2235 | \n", + "1.223300 | \n", + "
2236 | \n", + "1.857500 | \n", + "
2237 | \n", + "1.610700 | \n", + "
2238 | \n", + "1.853600 | \n", + "
2239 | \n", + "1.503600 | \n", + "
2240 | \n", + "1.569900 | \n", + "
2241 | \n", + "1.335400 | \n", + "
2242 | \n", + "1.489300 | \n", + "
2243 | \n", + "1.528300 | \n", + "
2244 | \n", + "1.360300 | \n", + "
2245 | \n", + "1.085500 | \n", + "
2246 | \n", + "1.272100 | \n", + "
2247 | \n", + "1.243700 | \n", + "
2248 | \n", + "1.471000 | \n", + "
"
+ ],
+ "text/plain": [
+ "