diff --git "a/.ipynb_checkpoints/gradio_tryout-checkpoint.ipynb" "b/.ipynb_checkpoints/gradio_tryout-checkpoint.ipynb" --- "a/.ipynb_checkpoints/gradio_tryout-checkpoint.ipynb" +++ "b/.ipynb_checkpoints/gradio_tryout-checkpoint.ipynb" @@ -24,10 +24,108 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 36, "id": "f8c28d2d-8458-49fd-8ebf-5e729d6e861f", "metadata": {}, "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "9ec45d8fb4e247e4b1188972547ebb7f", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "config.json: 0%| | 0.00/1.09k [00:00 7\u001b[0m df \u001b[38;5;241m=\u001b[39m \u001b[43mpred_trip\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcurrent_trip\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcurrent_type\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcut_off\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;241;43m0.5\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 8\u001b[0m \u001b[38;5;28mprint\u001b[39m(df)\n\u001b[1;32m 10\u001b[0m \u001b[38;5;66;03m# accuracy, perc true classes identified and perc wrong pred classes\u001b[39;00m\n", + "Cell \u001b[0;32mIn[37], line 14\u001b[0m, in \u001b[0;36mpred_trip\u001b[0;34m(trip_descr, trip_type, cut_off)\u001b[0m\n\u001b[1;32m 12\u001b[0m classes \u001b[38;5;241m=\u001b[39m [result[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mlabels\u001b[39m\u001b[38;5;124m'\u001b[39m][i] \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m indices]\n\u001b[1;32m 13\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m---> 14\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mclassifier\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtrip_descr\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcandidate_labels\u001b[49m\u001b[43m[\u001b[49m\u001b[43mkey\u001b[49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 15\u001b[0m classes \u001b[38;5;241m=\u001b[39m result[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mlabels\u001b[39m\u001b[38;5;124m\"\u001b[39m][\u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 16\u001b[0m \u001b[38;5;28mprint\u001b[39m(result)\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/transformers/pipelines/zero_shot_classification.py:206\u001b[0m, in \u001b[0;36mZeroShotClassificationPipeline.__call__\u001b[0;34m(self, sequences, *args, **kwargs)\u001b[0m\n\u001b[1;32m 203\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 204\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mUnable to understand extra arguments \u001b[39m\u001b[38;5;132;01m{\u001b[39;00margs\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m--> 206\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;21;43m__call__\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43msequences\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/transformers/pipelines/base.py:1294\u001b[0m, in \u001b[0;36mPipeline.__call__\u001b[0;34m(self, inputs, num_workers, batch_size, *args, **kwargs)\u001b[0m\n\u001b[1;32m 
1292\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39miterate(inputs, preprocess_params, forward_params, postprocess_params)\n\u001b[1;32m 1293\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mframework \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mpt\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(\u001b[38;5;28mself\u001b[39m, ChunkPipeline):\n\u001b[0;32m-> 1294\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mnext\u001b[39;49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1295\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43miter\u001b[39;49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1296\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_iterator\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1297\u001b[0m \u001b[43m \u001b[49m\u001b[43m[\u001b[49m\u001b[43minputs\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnum_workers\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mbatch_size\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpreprocess_params\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mforward_params\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpostprocess_params\u001b[49m\n\u001b[1;32m 1298\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1299\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1300\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1301\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 1302\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrun_single(inputs, preprocess_params, forward_params, postprocess_params)\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/transformers/pipelines/pt_utils.py:124\u001b[0m, in \u001b[0;36mPipelineIterator.__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 121\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mloader_batch_item()\n\u001b[1;32m 123\u001b[0m \u001b[38;5;66;03m# We're out of items within a batch\u001b[39;00m\n\u001b[0;32m--> 124\u001b[0m item \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mnext\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43miterator\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 125\u001b[0m processed \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39minfer(item, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mparams)\n\u001b[1;32m 126\u001b[0m \u001b[38;5;66;03m# We now have a batch of \"inferred things\".\u001b[39;00m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/transformers/pipelines/pt_utils.py:269\u001b[0m, in \u001b[0;36mPipelinePackIterator.__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 266\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m accumulator\n\u001b[1;32m 268\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m is_last:\n\u001b[0;32m--> 269\u001b[0m processed \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43minfer\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mnext\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43miterator\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mparams\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 270\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mloader_batch_size \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 271\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(processed, torch\u001b[38;5;241m.\u001b[39mTensor):\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/transformers/pipelines/base.py:1209\u001b[0m, in \u001b[0;36mPipeline.forward\u001b[0;34m(self, model_inputs, **forward_params)\u001b[0m\n\u001b[1;32m 1207\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m inference_context():\n\u001b[1;32m 1208\u001b[0m model_inputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_ensure_tensor_on_device(model_inputs, device\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdevice)\n\u001b[0;32m-> 1209\u001b[0m model_outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_forward\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel_inputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mforward_params\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1210\u001b[0m model_outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_ensure_tensor_on_device(model_outputs, device\u001b[38;5;241m=\u001b[39mtorch\u001b[38;5;241m.\u001b[39mdevice(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcpu\u001b[39m\u001b[38;5;124m\"\u001b[39m))\n\u001b[1;32m 1211\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/transformers/pipelines/zero_shot_classification.py:229\u001b[0m, in \u001b[0;36mZeroShotClassificationPipeline._forward\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m 227\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124muse_cache\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01min\u001b[39;00m inspect\u001b[38;5;241m.\u001b[39msignature(model_forward)\u001b[38;5;241m.\u001b[39mparameters\u001b[38;5;241m.\u001b[39mkeys():\n\u001b[1;32m 228\u001b[0m model_inputs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124muse_cache\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m\n\u001b[0;32m--> 229\u001b[0m outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mmodel_inputs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 231\u001b[0m model_outputs \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m 232\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcandidate_label\u001b[39m\u001b[38;5;124m\"\u001b[39m: candidate_label,\n\u001b[1;32m 233\u001b[0m 
\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msequence\u001b[39m\u001b[38;5;124m\"\u001b[39m: sequence,\n\u001b[1;32m 234\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mis_last\u001b[39m\u001b[38;5;124m\"\u001b[39m: inputs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mis_last\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 235\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39moutputs,\n\u001b[1;32m 236\u001b[0m }\n\u001b[1;32m 237\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m model_outputs\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/transformers/models/deberta_v2/modeling_deberta_v2.py:1297\u001b[0m, in \u001b[0;36mDebertaV2ForSequenceClassification.forward\u001b[0;34m(self, input_ids, attention_mask, token_type_ids, position_ids, inputs_embeds, labels, output_attentions, 
output_hidden_states, return_dict)\u001b[0m\n\u001b[1;32m 1289\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 1290\u001b[0m \u001b[38;5;124;03mlabels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):\u001b[39;00m\n\u001b[1;32m 1291\u001b[0m \u001b[38;5;124;03m Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,\u001b[39;00m\n\u001b[1;32m 1292\u001b[0m \u001b[38;5;124;03m config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If\u001b[39;00m\n\u001b[1;32m 1293\u001b[0m \u001b[38;5;124;03m `config.num_labels > 1` a classification loss is computed (Cross-Entropy).\u001b[39;00m\n\u001b[1;32m 1294\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 1295\u001b[0m return_dict \u001b[38;5;241m=\u001b[39m return_dict \u001b[38;5;28;01mif\u001b[39;00m return_dict \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconfig\u001b[38;5;241m.\u001b[39muse_return_dict\n\u001b[0;32m-> 1297\u001b[0m outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdeberta\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1298\u001b[0m \u001b[43m \u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1299\u001b[0m \u001b[43m \u001b[49m\u001b[43mtoken_type_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtoken_type_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1300\u001b[0m \u001b[43m \u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1301\u001b[0m \u001b[43m \u001b[49m\u001b[43mposition_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mposition_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1302\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs_embeds\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs_embeds\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1303\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_attentions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_attentions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1304\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_hidden_states\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_hidden_states\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1305\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_dict\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_dict\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1306\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1308\u001b[0m encoder_layer \u001b[38;5;241m=\u001b[39m outputs[\u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 1309\u001b[0m pooled_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpooler(encoder_layer)\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m 
\u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/transformers/models/deberta_v2/modeling_deberta_v2.py:1063\u001b[0m, in \u001b[0;36mDebertaV2Model.forward\u001b[0;34m(self, input_ids, attention_mask, token_type_ids, position_ids, inputs_embeds, output_attentions, output_hidden_states, return_dict)\u001b[0m\n\u001b[1;32m 1053\u001b[0m token_type_ids \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mzeros(input_shape, dtype\u001b[38;5;241m=\u001b[39mtorch\u001b[38;5;241m.\u001b[39mlong, device\u001b[38;5;241m=\u001b[39mdevice)\n\u001b[1;32m 1055\u001b[0m embedding_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39membeddings(\n\u001b[1;32m 1056\u001b[0m input_ids\u001b[38;5;241m=\u001b[39minput_ids,\n\u001b[1;32m 1057\u001b[0m token_type_ids\u001b[38;5;241m=\u001b[39mtoken_type_ids,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1060\u001b[0m inputs_embeds\u001b[38;5;241m=\u001b[39minputs_embeds,\n\u001b[1;32m 1061\u001b[0m )\n\u001b[0;32m-> 1063\u001b[0m encoder_outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mencoder\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1064\u001b[0m \u001b[43m \u001b[49m\u001b[43membedding_output\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1065\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1066\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_hidden_states\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 1067\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_attentions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_attentions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1068\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_dict\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_dict\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1069\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1070\u001b[0m encoded_layers \u001b[38;5;241m=\u001b[39m encoder_outputs[\u001b[38;5;241m1\u001b[39m]\n\u001b[1;32m 1072\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mz_steps \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m1\u001b[39m:\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m 
\u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/transformers/models/deberta_v2/modeling_deberta_v2.py:507\u001b[0m, in \u001b[0;36mDebertaV2Encoder.forward\u001b[0;34m(self, hidden_states, attention_mask, output_hidden_states, output_attentions, query_states, relative_pos, return_dict)\u001b[0m\n\u001b[1;32m 497\u001b[0m output_states \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_gradient_checkpointing_func(\n\u001b[1;32m 498\u001b[0m layer_module\u001b[38;5;241m.\u001b[39m\u001b[38;5;21m__call__\u001b[39m,\n\u001b[1;32m 499\u001b[0m next_kv,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 504\u001b[0m output_attentions,\n\u001b[1;32m 505\u001b[0m )\n\u001b[1;32m 506\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 507\u001b[0m output_states \u001b[38;5;241m=\u001b[39m \u001b[43mlayer_module\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 508\u001b[0m \u001b[43m \u001b[49m\u001b[43mnext_kv\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 509\u001b[0m \u001b[43m \u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 510\u001b[0m \u001b[43m \u001b[49m\u001b[43mquery_states\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mquery_states\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 511\u001b[0m \u001b[43m \u001b[49m\u001b[43mrelative_pos\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrelative_pos\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 512\u001b[0m \u001b[43m \u001b[49m\u001b[43mrel_embeddings\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrel_embeddings\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 513\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_attentions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_attentions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 514\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 516\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m output_attentions:\n\u001b[1;32m 517\u001b[0m output_states, att_m \u001b[38;5;241m=\u001b[39m output_states\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, 
and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/transformers/models/deberta_v2/modeling_deberta_v2.py:366\u001b[0m, in \u001b[0;36mDebertaV2Layer.forward\u001b[0;34m(self, hidden_states, attention_mask, query_states, relative_pos, rel_embeddings, output_attentions)\u001b[0m\n\u001b[1;32m 364\u001b[0m attention_output, att_matrix \u001b[38;5;241m=\u001b[39m attention_output\n\u001b[1;32m 365\u001b[0m intermediate_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mintermediate(attention_output)\n\u001b[0;32m--> 366\u001b[0m layer_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43moutput\u001b[49m\u001b[43m(\u001b[49m\u001b[43mintermediate_output\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mattention_output\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 367\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m output_attentions:\n\u001b[1;32m 368\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m (layer_output, att_matrix)\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, 
*args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/transformers/models/deberta_v2/modeling_deberta_v2.py:332\u001b[0m, in \u001b[0;36mDebertaV2Output.forward\u001b[0;34m(self, hidden_states, input_tensor)\u001b[0m\n\u001b[1;32m 331\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, hidden_states, input_tensor):\n\u001b[0;32m--> 332\u001b[0m hidden_states \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdense\u001b[49m\u001b[43m(\u001b[49m\u001b[43mhidden_states\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 333\u001b[0m hidden_states \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdropout(hidden_states)\n\u001b[1;32m 334\u001b[0m hidden_states \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mLayerNorm(hidden_states \u001b[38;5;241m+\u001b[39m input_tensor)\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File 
\u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m~/opt/anaconda3/envs/huggingface_env/lib/python3.8/site-packages/torch/nn/modules/linear.py:116\u001b[0m, in \u001b[0;36mLinear.forward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 115\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[0;32m--> 116\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlinear\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " ] } ], @@ -818,38 +353,10 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "id": "eb33fd31-94e6-40b5-9c36-a32effe77c01", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " superclass same_value same_value same_value same_value \\\n", - "0 activity_type True True False True \n", - "1 activities False False False False \n", - "2 climate_or_season True True True False \n", - "3 style_or_comfort False False True False \n", - "4 dress_code True False True True \n", - "5 accommodation False False False False \n", - "6 transportation True True False True \n", - "7 special_conditions False False False False \n", - "8 trip_length_days False True True True \n", - "\n", - " same_value same_value same_value same_value same_value same_value \n", - "0 True False True True True True \n", - "1 
False False False False False False \n", - "2 True False False True True False \n", - "3 False True True False True False \n", - "4 True True True False True False \n", - "5 False True True False False True \n", - "6 False True True True True True \n", - "7 False False False False False False \n", - "8 True False False False False True \n" - ] - } - ], + "outputs": [], "source": [ "# Extract \"same_value\" column from each DataFrame\n", "sv_columns = [df['same_value'] for df in result_list] # 'same' needs to be changed\n", @@ -863,27 +370,10 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "id": "bf7546cb-79ce-49ad-8cee-54d02239220c", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " superclass accuracy\n", - "0 activity_type 0.8\n", - "1 activities 0.0\n", - "2 climate_or_season 0.6\n", - "3 style_or_comfort 0.4\n", - "4 dress_code 0.7\n", - "5 accommodation 0.3\n", - "6 transportation 0.8\n", - "7 special_conditions 0.0\n", - "8 trip_length_days 0.5\n" - ] - } - ], + "outputs": [], "source": [ "# Compute accuracy per superclass (row means of same_value matrix excluding the first column)\n", "row_means = sv_df.iloc[:, 1:].mean(axis=1)\n", @@ -898,33 +388,12 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "id": "fd232953-59e8-4f28-9ce8-11515a2c310b", "metadata": { "scrolled": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "accuracy 0.454545\n", - "true_ident 0.689394\n", - "false_pred 0.409091\n", - "dtype: float64\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAA90AAAJOCAYAAACqS2TfAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABVZklEQVR4nO3de3hU5bn+8XsShgnBJCDhTDhXKCBSQRAQQQUiKoq1IkIhoqLVYMVcWkE3kpRqqFo23YqoVMFaEBTFuhWBgES2BcpZARU5ekA5BCQJRIYh8/7+sJmfMQEyYb2Zmcz3c125dK1515pnnnnDWndmzYzLGGMEAAAAAAAcFxPqAgAAAAAAqK4I3QAAAAAAWELoBgAAAADAEkI3AAAAAACWELoBAAAAALCE0A0AAAAAgCWEbgAAAAAALCF0AwAAAABgCaEbAAAAAABLCN0AAABABGnZsqVuu+22s46bPXu2XC6X9u7da70mAKdH6AYAAAAkrVq1SpmZmTp69GioS4kYRUVFyszMVG5ubqhLAcJWjVAXAAAAAISDVatWKSsrS7fddpvq1KkT6nJOa/v27YqJCY/XzoqKipSVlSVJ6tevX2iLAcJUePy2Aghrx48fD3UJAACEDb/frxMnToTs/j0ej9xud8juH0BwCN1ACHz55Ze699571a5dO9WqVUv16tXTzTffXO57ro4ePaoHHnhALVu2lMfjUbNmzTRq1Cjl5eUFxpw4cUKZmZm64IILFBcXp8aNG+vXv/61du3aJUnKzc2Vy+Uqc+nX3r175XK5NHv27MC62267Teedd5527dqla665RgkJCRoxYoQk6f/+7/908803q3nz5vJ4PEpJSdEDDzygH374oUzdn3/+uYYOHar69eurVq1aateunR599FFJ0ooVK+RyubRw4cIy282dO1cul0urV68Otq0AAFRaZmamHnroIUlSq1at5HK5Au+HdrlcGjt2rObMmaOOHTvK4/Fo8eLFQR1fpR+Pjb/5zW90/vnnKy4uTt26ddM777wTdK3lvad727ZtuvLKK1WrVi01a9ZMf/rTn+T3+8vd/v3331efPn1Uu3ZtJSQk6Nprr9W2bdtKjSk5H9i3b5+GDBmi8847T/Xr19eDDz6o4uLiwOOsX7++JCkrKyvQs8zMzKAfE1CdcXk5EALr1q3TqlWrNGzYMDVr1kx79+7VjBkz1K9fP3366aeKj4+XJB07dkx9+vTRZ599pttvv10XX3yx8vLy9M477+ibb75RcnKyiouLdd1112n58uUaNmyY7r//fhUWFionJ0dbt25VmzZtgq7v1KlTSk1N1WWXXaann346UM8bb7yhoqIi3XPPPapXr57Wrl2rZ555Rt98843eeOONwPaffPKJ+vTpI7fbrbvuukstW7bUrl279L//+796/PHH1a9fP6WkpGjOnDm68cYbS933nDlz1KZNG/Xs2fMcOgwAQHB+/etf64svvtBrr72m//7v/1ZycrIkBULlBx98oNdff11jx45VcnKyWrZsGdR7v7dt26bevXuradOmGj9+vGrXrq3XX39dQ4YM0ZtvvlnmeBiM/fv364orrtCpU6cC+37xxRdVq1atMmNfffVVpaWlKTU1VX/+859VVFSkGTNm6LLLLtOmTZvUsmXLwNji4mKlpqaqR48eevrpp7Vs2TL95S9/UZs2bXTPPfeofv36mjFjhu655x7deOON+vWvfy1J6ty5c6UfC1AtGQBVrqioqMy61atXG0nm73//e2DdY489ZiSZt956q8x4v99vjDHm5ZdfNpLM1KlTTztmxYoVRpJZsWJFqdv37NljJJlZs2YF1qWlpRlJZvz48RWqOzs727hcLvPll18G1l1++eUmISGh1Lqf1mOMMRMmTDAej8ccPXo0sO7gwYOmRo0aZt
KkSWXuBwAA25566ikjyezZs6fUekkmJibGbNu2rdT6YI6vV111lbnwwgvNiRMnAuv8fr/p1auX+cUvfhFUnS1atDBpaWmB5XHjxhlJ5t///ndg3cGDB01SUlKpx1NYWGjq1KljxowZU2p/+/fvN0lJSaXWl5wP/PGPfyw19le/+pXp2rVrYPnQoUNGEsdu4Ay4vBwIgZ/+5dnn8+nw4cNq27at6tSpo40bNwZue/PNN3XRRReV+9dvl8sVGJOcnKz77rvvtGMq45577jlj3cePH1deXp569eolY4w2bdokSTp06JBWrlyp22+/Xc2bNz9tPaNGjZLX69WCBQsC6+bPn69Tp07pt7/9baXrBgDAhr59+6pDhw6V2vbIkSP64IMPNHToUBUWFiovL095eXk6fPiwUlNTtWPHDu3bt6/StS1atEiXXnqpunfvHlhXv379wNvDSuTk5Ojo0aO69dZbAzXk5eUpNjZWPXr00IoVK8rs+3e/+12p5T59+mj37t2VrhWIRlxeDoTADz/8oOzsbM2aNUv79u2TMSZwW35+fuD/d+3apZtuuumM+9q1a5fatWunGjWc+3WuUaOGmjVrVmb9V199pccee0zvvPOOvv/++1K3ldRdciDu1KnTGe+jffv2uuSSSzRnzhzdcccdkn68tPzSSy9V27ZtnXgYAAA4plWrVpXedufOnTLGaOLEiZo4cWK5Yw4ePKimTZtWav9ffvmlevToUWZ9u3btSi3v2LFDknTllVeWu5/ExMRSy3FxcYHL60vUrVu3zDkAgDMjdAMhcN9992nWrFkaN26cevbsqaSkJLlcLg0bNuy0H3pyLk73infJB6H8nMfjKfNVJMXFxRowYICOHDmihx9+WO3bt1ft2rW1b98+3XbbbZWqe9SoUbr//vv1zTffyOv1as2aNXr22WeD3g8AALaV9/7oih5fS46RDz74oFJTU8vdpir+4FxSx6uvvqpGjRqVuf3nf8CPjY21XhMQDQjdQAgsWLBAaWlp+stf/hJYd+LEiTIfyNKmTRtt3br1jPtq06aN/v3vf8vn853260Pq1q0rSWX2/+WXX1a45i1btuiLL77QK6+8olGjRgXW5+TklBrXunVrSTpr3ZI0bNgwZWRk6LXXXtMPP/wgt9utW265pcI1AQDgpGDfllXR42vJsdHtdqt///6VL/A0WrRoEXgV+6e2b99earnkw1UbNGjgWB3n8lY2IFrwnm4gBGJjY0tdUi5JzzzzTJm/jN900036+OOPy/1qrZLtb7rpJuXl5ZX7CnHJmBYtWig2NlYrV64sdftzzz0XVM0/3WfJ///1r38tNa5+/fq6/PLL9fLLL+urr74qt54SycnJGjRokP7xj39ozpw5uvrqqwOfFgsAQFWrXbu2pLIh+nQqenxt0KCB+vXrpxdeeEHfffddmf0cOnSocgX/xzXXXKM1a9Zo7dq1pfY5Z86cUuNSU1OVmJioJ554Qj6fz5E6Sr7hJJhPcgeiDa90AyFw3XXX6dVXX1VSUpI6dOig1atXa9myZapXr16pcQ899JAWLFigm2++Wbfffru6du2qI0eO6J133tHzzz+viy66SKNGjdLf//53ZWRkaO3aterTp4+OHz+uZcuW6d5779UNN9ygpKQk3XzzzXrmmWfkcrnUpk0bvfvuuzp48GCFa27fvr3atGmjBx98UPv27VNiYqLefPPNct/X9T//8z+67LLLdPHFF+uuu+5Sq1attHfvXr333nvavHlzqbGjRo3Sb37zG0nS5MmTg28mAAAO6dq1qyTp0Ucf1bBhw+R2uzV48ODTjg/m+Dp9+nRddtlluvDCCzVmzBi1bt1aBw4c0OrVq/XNN9/o448/rnTdf/jDH/Tqq6/q6quv1v333x/4yrAWLVrok08+CYxLTEzUjBkzNHLkSF188cUaNmyY6tevr6+++krvvfeeevfuHfTbvGrVqqUOHTpo/vz5uuCCC3T++eerU6dOZ/1sFyCqhOxz04Eo9v3335vRo0eb5ORkc95555nU1FTz+eefl/kKEGOMOXz4sBk7dqxp2rSpqVmzpmnWrJlJS0szeXl5gTFFRUXm0UcfNa1atTJut9s0atTI/OY3vzG7du0KjDl06JC56aabTHx8vKlbt665++67zdatW8v9yrDatWuXW/enn35q+vfvb8477zyTnJxsxowZYz7++OMy+zDGmK1bt5obb7zR1KlTx8TFxZl27dqZiRMnltmn1+s1devWNUlJSeaHH34IvpkAADho8uTJpmnTpiYmJibwdVuSTHp6ernjK3p8NcaYXbt2mVGjRplGjRoZt9ttmjZtaq677jqzYMGCoGos73zhk08+MX379jVxcXGmadOmZvLkyeall14q9yvQVqxYYVJTU01SUpKJi4szbdq0MbfddptZv359YMzpzgcmTZpkfh4hVq1aZbp27Wpq1qzJ14cB5XAZ87PrPQGgCp06dUpNmjTR4MGD9dJLL4W6HAAAAMBRvKcbQEi9/fbbOnToUKkPZwMAAACqC17pBhAS//73v/XJJ59o8uTJSk5O1saNG0NdEgAAIbV///4z3l6rVi0lJSVVUTUAnMIHqQEIiRkzZugf//iHunTpotmzZ4e6HAAAQq5x48ZnvD0tLY1jJhCBeKUbAAAACAPLli074+1NmjRRhw4dqqgaAE4hdAMAAAAAYAkfpAYAAAAAgCVV/p5uv9+vb7/9VgkJCXK5XFV99wAAhD1jjAoLC9WkSRPFxITm7+McrwEAOLOKHq+rPHR/++23SklJqeq7BQAg4nz99ddq1qxZSO6b4zUAABVztuN1lYfuhIQEST8WlpiYWNV37zifz6elS5dq4MCBcrvdoS4natD3qkfPQ4O+h0ao+15QUKCUlJTAMTMUbByvQ93X6oReOot+OodeOot+OsdGLyt6vK7y0F1yiVpiYmK1Cd3x8fFKTEzkF6EK0feqR89Dg76HRrj0PZSXdds4XodLX6sDeuks+ukceuks+ukcm7082/GaD1IDAAAAAMASQjcAAAAAAJYQugEAAAAAsITQDQAAAACAJUGF7uLiYk2cOFGtWrVSrVq11KZNG02ePFnGGFv1AQCASsjMzJTL5Sr10759+1CXBQBA1Anq08v//Oc/a8aMGXrllVfUsWNHrV+/XqNHj1ZSUpJ+//vf26oRAABUQseOHbVs2bLAco0aVf6lJQAARL2gjr6rVq3SDTfcoGuvvVaS1LJlS7322mtau3atleIAAEDl1ahRQ40aNQp1GQAARLWgQnevXr304osv6osvvtAFF1ygjz/+WB999JGmTp162m28Xq+8Xm9guaCgQNKP35Pm8/kqWXb4KHkM1eGxRBL6XvXoeWjQ99AIdd+dut8dO3aoSZMmiouLU8+ePZWdna3mzZuXO7Yqjteh7mt1Qi+dRT+dQy+dRT+dY6OXFd2XywTxhmy/369HHnlETz75pGJjY1VcXKzHH39cEyZMOO02mZmZysrKKrN+7ty5io+Pr+hdAwAQNYqKijR8+HDl5+crMTGxUvt4//33dezYM
bVr107fffedsrKytG/fPm3dulUJCQllxnO8BgAgOBU9XgcVuufNm6eHHnpITz31lDp27KjNmzdr3Lhxmjp1qtLS0srdpry/nKekpCgvL6/SJxLhxOfzKScnRwMGDJDb7Q51OVGDvjurU+aSs47xxBhN7ubXxPUx8vpdVVBVWVszU0Nyv6HEXA+NUPe9oKBAycnJ5xS6f+7o0aNq0aKFpk6dqjvuuKPM7VVxvA51X6sTeuks+ukceumsSOlnRc4lQ63kXNbJXlb0eB3U5eUPPfSQxo8fr2HDhkmSLrzwQn355ZfKzs4+bej2eDzyeDxl1rvd7rCeOMGqbo8nUtB3Z3iLKx6ivX5XUOOdFM3PNXM9NELVdxv3WadOHV1wwQXauXNnubdX5fGa+ewceuks+ukceumscO9nqM4NK8PJXlZ0P0F9ZVhRUZFiYkpvEhsbK7/fH8xuAABAFTt27Jh27dqlxo0bh7oUAACiSlCvdA8ePFiPP/64mjdvro4dO2rTpk2aOnWqbr/9dlv1AQCASnjwwQc1ePBgtWjRQt9++60mTZqk2NhY3XrrraEuDQCAqBJU6H7mmWc0ceJE3XvvvTp48KCaNGmiu+++W4899pit+gAAQCV88803uvXWW3X48GHVr19fl112mdasWaP69euHujQAAKJKUKE7ISFB06ZN07Rp0yyVAwAAnDBv3rxQlwAAABTke7oBAAAAAEDFEboBAAAAALCE0A0AAAAAgCWEbgAAAAAALCF0AwAAAABgCaEbAAAAAABLCN0AAAAAAFhC6AYAAAAAwBJCNwAAAAAAlhC6AQAAAACwhNANAAAAAIAlhG4AAAAAACwhdAMAAAAAYAmhGwAAAAAASwjdAAAAAABYQugGAAAAAMASQjcAAAAAAJYQugEAAAAAsITQDQAAAACAJYRuAAAAAAAsIXQDAAAAAGAJoRsAAAAAAEsI3QAAAAAAWELoBgAAAADAEkI3AAAAAACWELoBAAAAALCE0A0AAAAAgCWEbgAAAAAALCF0AwAAAABgCaEbAAAAAABLCN0AAAAAAFhC6AYAAAAAwBJCNwAAAAAAlhC6AQCo5qZMmSKXy6Vx48aFuhQAAKIOoRsAgGps3bp1euGFF9S5c+dQlwIAQFQidAMAUE0dO3ZMI0aM0MyZM1W3bt1QlwMAQFSqEeoCAACAHenp6br22mvVv39//elPfzrjWK/XK6/XG1guKCiQJPl8Pvl8PkfqKdmPU/uLZvTSWfTTOfTSWZHST0+sCXUJZ+WJ+bFGJ3tZ0X0RugEAqIbmzZunjRs3at26dRUan52draysrDLrly5dqvj4eEdry8nJcXR/0YxeOot+OodeOivc+/lk91BXUHFO9rKoqKhC4wjdAABUM19//bXuv/9+5eTkKC4urkLbTJgwQRkZGYHlgoICpaSkaODAgUpMTHSkLp/Pp5ycHA0YMEBut9uRfUYreuks+ukceumsSOlnp8wloS7hrDwxRpO7+R3tZclVYWdD6AYAoJrZsGGDDh48qIsvvjiwrri4WCtXrtSzzz4rr9er2NjYUtt4PB55PJ4y+3K73Y6f6NnYZ7Sil86in86hl84K9356i12hLqHCnOxlRfdD6AYAoJq56qqrtGXLllLrRo8erfbt2+vhhx8uE7gBAIA9hG4AAKqZhIQEderUqdS62rVrq169emXWAwAAu/jKMAAAAAAALOGVbgAAokBubm6oSwAAICoF/Ur3vn379Nvf/lb16tVTrVq1dOGFF2r9+vU2agMAAAAAIKIF9Ur3999/r969e+uKK67Q+++/r/r162vHjh2qW7eurfoAAAAAAIhYQYXuP//5z0pJSdGsWbMC61q1auV4UQAAAAAAVAdBhe533nlHqampuvnmm/Xhhx+qadOmuvfeezVmzJjTbuP1euX1egPLJV8g7vP55PP5Kll2+Ch5DNXhsUQS+u4sT6w5+5gYU+q/oRCNzzdzPTRC3XeebwAAqo+gQvfu3bs1Y8YMZWRk6JFHHtG6dev0+9//XjVr1lRaWlq522RnZysrK6vM+qVLlyo+Pr5yVYehnJycUJcQlei7M57sXvGxk7v57RVyFosWLQrZfYcacz00QtX3oqKikNwvAABwXlCh2+/3q1u3bnriiSckSb/61a+0detWPf/886cN3RMmTFBGRkZguaCgQCkpKRo4cKASExPPofTw4PP5lJOTowEDBsjtdoe6nKhB353VKXPJWcd4Yowmd/Nr4voYef2uKqiqrK2ZqSG531BirodGqPteclUYAACIfEGF7saNG6tDhw6l1v3yl7/Um2++edptPB6PPB5PmfVut7tanUBWt8cTKei7M7zFFQ/RXr8rqPFOiubnmrkeGqHqO881AADVR1BfGda7d29t37691LovvvhCLVq0cLQoAAAAAACqg6BC9wMPPKA1a9boiSee0M6dOzV37ly9+OKLSk9Pt1UfAAAAAAARK6jQfckll2jhwoV67bXX1KlTJ02ePFnTpk3TiBEjbNUHAAAAAEDECuo93ZJ03XXX6brrrrNRCwAAAAAA1UpQr3QDAAAAAICKI3QDAAAAAGAJoRsAAAAAAEsI3QAAAAAAWELoBgAAAADAEkI3AAAAAACWELoBAAAAALCE0A0AAAAAgCWEbgAAAAAALCF0AwAAAABgCaEbAAAAAABLCN0AAAAAAFhC6AYAAAAAwBJCNwAAAAAAlhC6AQAAAACwhNANAAAAAIAlhG4AAAAAACwhdAMAAAAAYAmhGwAAAAAASwjdAAAAAABYQugGAAAAAMASQjcAAAAAAJYQugEAqIZmzJihzp07KzExUYmJierZs6fef//9UJcFAEDUIXQDAFANNWvWTFOmTNGGDRu0fv16XXnllbrhhhu0bdu2UJcGAEBUqRHqAgAAgPMGDx5cavnxxx/XjBkztGbNGnXs2DFEVQEAEH0I3QAAVHPFxcV64403dPz4cfXs2TPU5QAAEFUI3QAAVFNbtmxRz549deLECZ133nlauHChOnToUO5Yr9crr9cbWC4oKJAk+Xw++Xw+R+op2Y9T+4tm9NJZ9NM59NJZkdJPT6wJdQln5Yn5sUYne1nRfRG6AQCoptq1a6fNmzcrPz9fCxYsUFpamj788MNyg3d2draysrLKrF+6dKni4+MdrSsnJ8fR/UUzeuks+ukceumscO/nk91DXUHFOdnLoqKiCo0jdAMAUE3VrFlTbdu2lSR17dpV69at01//+le98MILZcZOmDBBGRkZgeWCggKlpKRo4MCBSkxMdKQen8+nnJwcDRgwQG6325F9Rit66axI6menzCWhLuGMPDFGk7v5I6KXkSBS5ma4z0vJztwsuSrsbAjdAABECb/fX+oS8p/yeDzyeDxl1rvdbsdP9GzsM1rRS2dFQj+9xa5Ql1AhkdDLSBLu/YyUeSk528uK7ofQDQBANTRhwgQNGjRIzZs3V2FhoebOnavc3FwtWRL+r0YAAFCdELoBAKiGDh48qFGjRum7775TUlKSOnfurCVLlmjAgAGhLg0AgKhC6AYAoBp66aWX
Ql0CAACQFBPqAgAAAAAAqK4I3QAAAAAAWELoBgAAAADAEkI3AAAAAACWELoBAAAAALCE0A0AAAAAgCWEbgAAAAAALCF0AwAAAABgCaEbAAAAAABLCN0AAAAAAFhC6AYAAAAAwJJzCt1TpkyRy+XSuHHjHCoHAAAAAIDqo9Khe926dXrhhRfUuXNnJ+sBAAAAAKDaqFToPnbsmEaMGKGZM2eqbt26TtcEAAAAAEC1UKnQnZ6ermuvvVb9+/d3uh4AAAAAAKqNGsFuMG/ePG3cuFHr1q2r0Hiv1yuv1xtYLigokCT5fD75fL5g7z7slDyG6vBYIgl9d5Yn1px9TIwp9d9QiMbnm7keGqHuO883AADVR1Ch++uvv9b999+vnJwcxcXFVWib7OxsZWVllVm/dOlSxcfHB3P3YS0nJyfUJUQl+u6MJ7tXfOzkbn57hZzFokWLQnbfocZcD41Q9b2oqCgk9wsAAJwXVOjesGGDDh48qIsvvjiwrri4WCtXrtSzzz4rr9er2NjYUttMmDBBGRkZgeWCggKlpKRo4MCBSkxMPMfyQ8/n8yknJ0cDBgyQ2+0OdTlRg747q1PmkrOO8cQYTe7m18T1MfL6XVVQVVlbM1NDcr+hxFwPjVD3veSqMAAAEPmCCt1XXXWVtmzZUmrd6NGj1b59ez388MNlArckeTweeTyeMuvdbne1OoGsbo8nUtB3Z3iLKx6ivX5XUOOdFM3PNXM9NELVd55rAACqj6BCd0JCgjp16lRqXe3atVWvXr0y6wEAAAAAiHaV/p5uAAAAAABwZkF/evnP5ebmOlAGAAAAAADVD690AwAAAABgCaEbAAAAAABLCN0AAAAAAFhC6AYAAAAAwBJCNwAAAAAAlhC6AQAAAACwhNANAAAAAIAlhG4AAAAAACwhdAMAAAAAYAmhGwAAAAAASwjdAAAAAABYQugGAKAays7O1iWXXKKEhAQ1aNBAQ4YM0fbt20NdFgAAUYfQDQBANfThhx8qPT1da9asUU5Ojnw+nwYOHKjjx4+HujQAAKJKjVAXAAAAnLd48eJSy7Nnz1aDBg20YcMGXX755SGqCgCA6MMr3QAARIH8/HxJ0vnnnx/iSgAAiC680g0AQDXn9/s1btw49e7dW506dSp3jNfrldfrDSwXFBRIknw+n3w+nyN1lOzHqf1FM3rprEjqpyfWhLqEM/LE/FhfJPQyEkTK3Az3eSnZmZsV3RehGwCAai49PV1bt27VRx99dNox2dnZysrKKrN+6dKlio+Pd7SenJwcR/cXzeilsyKhn092D3UFFRMJvYwk4d7PSJmXkrO9LCoqqtA4QjcAANXY2LFj9e6772rlypVq1qzZacdNmDBBGRkZgeWCggKlpKRo4MCBSkxMdKQWn8+nnJwcDRgwQG6325F9Rit66axI6menzCWhLuGMPDFGk7v5I6KXkSBS5ma4z0vJztwsuSrsbAjdAABUQ8YY3XfffVq4cKFyc3PVqlWrM473eDzyeDxl1rvdbsdP9GzsM1rRS2dFQj+9xa5Ql1AhkdDLSBLu/YyUeSk528uK7ofQDQBANZSenq65c+fqn//8pxISErR//35JUlJSkmrVqhXi6gAAiB58ejkAANXQjBkzlJ+fr379+qlx48aBn/nz54e6NAAAogqvdAMAUA0ZE/6fJAsAQDTglW4AAAAAACwhdAMAAAAAYAmhGwAAAAAASwjdAAAAAABYQugGAAAAAMASQjcAAAAAAJYQugEAAAAAsITQDQAAAACAJYRuAAAAAAAsIXQDAAAAAGAJoRsAAAAAAEsI3QAAAAAAWELoBgAAAADAEkI3AAAAAACWELoBAAAAALCE0A0AAAAAgCWEbgAAAAAALCF0AwAAAABgCaEbAAAAAABLCN0AAAAAAFhC6AYAAAAAwBJCNwAAAAAAlgQVurOzs3XJJZcoISFBDRo00JAhQ7R9+3ZbtQEAAAAAENGCCt0ffvih0tPTtWbNGuXk5Mjn82ngwIE6fvy4rfoAAAAAAIhYNYIZvHjx4lLLs2fPVoMGDbRhwwZdfvnljhYGAAAAAECkO6f3dOfn50uSzj//fEeKAQAAAACgOgnqle6f8vv9GjdunHr37q1OnTqddpzX65XX6w0sFxQUSJJ8Pp98Pl9l7z5slDyG6vBYIgl9d5Yn1px9TIwp9d9QiMbnm7keGqHuO883AADVR6VDd3p6urZu3aqPPvrojOOys7OVlZVVZv3SpUsVHx9f2bsPOzk5OaEuISrRd2c82b3iYyd389sr5CwWLVoUsvsONeZ6aISq70VFRSG5XwAA4LxKhe6xY8fq3Xff1cqVK9WsWbMzjp0wYYIyMjICywUFBUpJSdHAgQOVmJhYmbsPKz6fTzk5ORowYIDcbneoy4ka9N1ZnTKXnHWMJ8Zocje/Jq6PkdfvqoKqytqamRqS+w2laJ/rFZmbNgQ7352emyVXhQEAgMgXVOg2xui+++7TwoULlZubq1atWp11G4/HI4/HU2a92+2uVieQ1e3xRAr67gxvccVDtNfvCmq8k6L5uY7WuR6quRa4/wrOd6efm2h8rgEAqK6CCt3p6emaO3eu/vnPfyohIUH79++XJCUlJalWrVpWCgQAAAAAIFIF9enlM2bMUH5+vvr166fGjRsHfubPn2+rPgAAAAAAIlbQl5cDAAAAAICKOafv6QYAAOFp5cqVGjx4sJo0aSKXy6W333471CUBABCVCN0AAFRDx48f10UXXaTp06eHuhQAAKJapb+nGwAAhK9BgwZp0KBBoS4DAICoxyvdAAAAAABYwivdAABAXq9XXq83sFxQUCBJ8vl88vl8jtxHyX6c2l80o5fOiqR+emLD+4ONPTE/1hcJvYwEkTI3w31eSnbmZkX3RegGAADKzs5WVlZWmfVLly5VfHy8o/eVk5Pj6P6iGb10ViT088nuoa6gYiKhl5Ek3PsZKfNScraXRUVFFRpH6AYAAJowYYIyMjICywUFBUpJSdHAgQOVmJjoyH34fD7l5ORo4voYef0uR/Zpy9bM1FCXcEYlvRwwYIDcbneoyzmjTplLQl3CWXlijCZ389NPB9BLZ0VKP6O1lyVXhZ0NoRsAAMjj8cjj8ZRZ73a7HT/R8/pd8haHd+gO55Pbn7Lx/Dgt3J/rn6KfzqGXzgr3fkZrLyu6H0I3AADV0LFjx7Rz587A8p49e7R582adf/75at68eQgrAwAguhC6AQCohtavX68rrrgisFxy6XhaWppmz54doqoAAIg+hG4AAKqhfv36yZjw/zRZAACqO76nGwAAAAAASwjdAAAAAABYQugGAAAAAMASQjcAAAAAAJYQugEAAAAAsITQDQAAAACAJYRuAAAAAAAsIXQDAAAAAGAJoRsAAAAAAEsI3QAAAAAAWFIj1AU4oeX490J2355Yoye7S50yl8hb7Drj2L1Trq2iqgAAAAAA4YBXugEAAAAAsITQDQAAAACAJYRuAAAAAAAsIXQDAAAAAGAJoRsAAAAAAEsI3QAAAAAAWELoBgAAAADAEkI3AAAAAACWELoBAAA
AALCE0A0AAAAAgCWEbgAAAAAALCF0AwAAAABgCaEbAAAAAABLCN0AAAAAAFhC6AYAAAAAwBJCNwAAAAAAlhC6AQAAAACwhNANAAAAAIAlhG4AAAAAACwhdAMAAAAAYAmhGwAAAAAASwjdAAAAAABYUqnQPX36dLVs2VJxcXHq0aOH1q5d63RdAADgHHG8BgAg9IIO3fPnz1dGRoYmTZqkjRs36qKLLlJqaqoOHjxooz4AAFAJHK8BAAgPQYfuqVOnasyYMRo9erQ6dOig559/XvHx8Xr55Zdt1AcAACqB4zUAAOEhqNB98uRJbdiwQf379///O4iJUf/+/bV69WrHiwMAAMHjeA0AQPioEczgvLw8FRcXq2HDhqXWN2zYUJ9//nm523i9Xnm93sByfn6+JOnIkSPy+XzB1luuGqeOO7KfSt2336ioyK8avhgV+11nHHv48OEqqqr68/l8Kioq0uHDh+V2u0NdTsSryO9QMHPdlmj8HYr2uR6qf9+Dne9Oz83CwkJJkjGmUtuH6/G6ZD6H8t+Rigr3f28i6d+GUJ6nVVTJ7zz9PHf00lmR0s9o7WVFj9dBhe7KyM7OVlZWVpn1rVq1sn3XVWZ4Bccl/8VqGYB1FZ3rtvA7hKoUzHy3NTcLCwuVlJRkZ+c/Ew3H62Dw7030CfUxrjqhl86in86x1cuzHa+DCt3JycmKjY3VgQMHSq0/cOCAGjVqVO42EyZMUEZGRmDZ7/fryJEjqlevnlyu8P4rd0UUFBQoJSVFX3/9tRITE0NdTtSg71WPnocGfQ+NUPfdGKPCwkI1adKkUtuH6/E61H2tTuils+inc+ils+inc2z0sqLH66BCd82aNdW1a1ctX75cQ4YMkfTjQXn58uUaO3Zsudt4PB55PJ5S6+rUqRPM3UaExMREfhFCgL5XPXoeGvQ9NELZ93N5hTvcj9fMZ+fQS2fRT+fQS2fRT+c43cuKHK+Dvrw8IyNDaWlp6tatm7p3765p06bp+PHjGj16dKWKBAAAzuN4DQBAeAg6dN9yyy06dOiQHnvsMe3fv19dunTR4sWLy3xYCwAACB2O1wAAhIdKfZDa2LFjT3t5WrTxeDyaNGlSmUvyYBd9r3r0PDToe2hUl76H2/G6uvQ1HNBLZ9FP59BLZ9FP54Syly5T2e8jAQAAAAAAZxQT6gIAAAAAAKiuCN0AAAAAAFhC6AYAAAAAwBJC989Mnz5dLVu2VFxcnHr06KG1a9eeduxbb72lbt26qU6dOqpdu7a6dOmiV199tdSY2267TS6Xq9TP1VdfbfthRJxg+v5T8+bNk8vlCnwPbQljjB577DE1btxYtWrVUv/+/bVjxw4LlUc2p/vOfK+YYPo+e/bsMj2Ni4srNYb5XjFO9535Xr5g/11544031L59e8XFxenCCy/UokWLqqjS8BdML2fOnKk+ffqobt26qlu3rvr371/hf9OjhdPHvGgWbC+PHj2q9PR0NW7cWB6PRxdccAG/6z8RbD+nTZumdu3aqVatWkpJSdEDDzygEydOVFG14WvlypUaPHiwmjRpIpfLpbfffvus2+Tm5uriiy+Wx+NR27ZtNXv2bDvFGQTMmzfP1KxZ07z88stm27ZtZsyYMaZOnTrmwIED5Y5fsWKFeeutt8ynn35qdu7caaZNm2ZiY2PN4sWLA2PS0tLM1Vdfbb777rvAz5EjR6rqIUWEYPteYs+ePaZp06amT58+5oYbbih125QpU0xSUpJ5++23zccff2yuv/5606pVK/PDDz9YfCSRxUbfme9nF2zfZ82aZRITE0v1dP/+/aXGMN/Pzkbfme9lBdvnf/3rXyY2NtY8+eST5tNPPzX/9V//Zdxut9myZUsVVx5+gu3l8OHDzfTp082mTZvMZ599Zm677TaTlJRkvvnmmyquPDzZOOZFq2B76fV6Tbdu3cw111xjPvroI7Nnzx6Tm5trNm/eXMWVh6dg+zlnzhzj8XjMnDlzzJ49e8ySJUtM48aNzQMPPFDFlYefRYsWmUcffdS89dZbRpJZuHDhGcfv3r3bxMfHm4yMDPPpp5+aZ555pkyWcwqh+ye6d+9u0tPTA8vFxcWmSZMmJjs7u8L7+NWvfmX+67/+K7CclpbGP9JnUZm+nzp1yvTq1cv87W9/K9Njv99vGjVqZJ566qnAuqNHjxqPx2Nee+01K48hEjndd2OY7xURbN9nzZplkpKSTrs/5nvFON13Y5jv5Qm2z0OHDjXXXnttqXU9evQwd999t9U6I8G5npOcOnXKJCQkmFdeecVWiRHFxjEvWgXbyxkzZpjWrVubkydPVlWJESXYfqanp5srr7yy1LqMjAzTu3dvq3VGmoqE7j/84Q+mY8eOpdbdcsstJjU11fF6uLz8P06ePKkNGzaof//+gXUxMTHq37+/Vq9efdbtjTFavny5tm/frssvv7zUbbm5uWrQoIHatWune+65R4cPH3a8/khV2b7/8Y9/VIMGDXTHHXeUuW3Pnj3av39/qX0mJSWpR48eFXouo4GNvpdgvp9eZft+7NgxtWjRQikpKbrhhhu0bdu2wG3M97Oz0fcSzPf/rzJ9Xr16danxkpSamhr1c/dcz0kkqaioSD6fT+eff76tMiOGzWNetKlML9955x317NlT6enpatiwoTp16qQnnnhCxcXFVVV22KpMP3v16qUNGzYELkHfvXu3Fi1apGuuuaZKaq5OqvIYVMPxPUaovLw8FRcXq2HDhqXWN2zYUJ9//vlpt8vPz1fTpk3l9XoVGxur5557TgMGDAjcfvXVV+vXv/61WrVqpV27dumRRx7RoEGDtHr1asXGxlp7PJGiMn3/6KOP9NJLL2nz5s3l3r5///7APn6+z5Lbop2NvkvM97OpTN/btWunl19+WZ07d1Z+fr6efvpp9erVS9u2bVOzZs2Y7xVgo+8S8/3nKtPn/fv3M3fLUdlzkp96+OGH1aRJkzInlNHI1jEvGlWml7t379YHH3ygESNGaNGiRdq5c6fuvfde+Xw+TZo0qSrKDluV6efw4cOVl5enyy67TMYYnTp1Sr/73e/0yCOPVEXJ1crpjkEFBQX64YcfVKtWLcfui9B9jhISErR582YdO3ZMy5cvV0ZGhlq3bq1+/fpJkoYNGxYYe+GFF6pz585q06aNcnNzddVVV4Wo6shVWFiokSNHaubMmUpOTg51OVGjon1nvjuvZ8+e6tmzZ2C5V69e+uUvf6kXXnhBkydPDmFl1VtF+s58R7iaMmWK5s2bp9zc3DIfAIiz41zDWX6/Xw0aNNCLL76o2NhYde3aVfv27dNTTz0V9aG7MnJzc/XEE0/oueeeU48ePbRz507df//9mjx5siZOnBjq8nAahO7/SE5OVmxsrA4cOFBq/YEDB9SoUaPTbhcTE6O2bdtKkrp06aLPPvtM2dnZgdD9c61bt1ZycrJ27tzJSZmC7/uuXbu0d+9eDR48OLDO7/dLkmrUqKHt27cHtjtw4IAaN25cap9dunSx8Cgij42+t2nTpsx2zPfSKvvvzE+53W796le/0s6dOyWJ+V
[... base64-encoded PNG data for the removed matplotlib figure output elided ...]", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "# Compute performance measures per trip (mean for each column of performance table)\n", "column_means = performance.mean()\n", @@ -938,14 +407,14 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "id": "bd682c84-3eb1-4a8d-9621-b741e98e4537", "metadata": {}, "outputs": [], "source": [ "# save results\n", "# Example data for one model\n", - "model_name = 'model_a_facebook-bart-large-mnli'\n", + "model_name = 'model_MoritzLaurer-DeBERTa-v3-base-mnli-fever-anli'\n", "# Structure to save\n", "model_result = {\n", " 'model': model_name,\n", @@ -963,16 +432,97 @@ " pickle.dump(model_result, f)" ] }, + { + "cell_type": "markdown", + "id": "e1cbb54e-abe6-49b6-957e-0683196f3199", + "metadata": {}, + "source": [ + "**Load and compare results**" + ] + }, { "cell_type": "code", - "execution_count": null, - "id": "905eab28-1735-4c0e-abf8-0ffc5e799eb0", + "execution_count": 35, + "id": "62ca82b0-6909-4e6c-9d2c-fed87971e5b6", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Model: model_a_facebook-bart-large-mnli\n", + "Performance Summary:\n", + "accuracy 0.454545\n", + "true_ident 0.689394\n", + "false_pred 0.409091\n", + "dtype: float64\n", + "----------------------------------------\n", + "Model: model_b_sileod-deberta-v3-base-tasksource-nli\n", + "Performance Summary:\n", + "accuracy 0.500000\n", + "true_ident 0.666667\n", + "false_pred 0.551667\n", + "dtype: float64\n", + "----------------------------------------\n", + "Model: model_a_facebook-bart-large-mnli\n", + "Performance Summary:\n", + " superclass accuracy\n", + "0 activity_type 0.8\n", + "1 activities 0.0\n", + "2 climate_or_season 0.6\n", + "3 style_or_comfort 0.4\n", + "4 dress_code 0.7\n", + "5 accommodation 0.3\n", + "6 transportation 0.8\n", + "7 special_conditions 0.0\n", + "8 trip_length_days 0.5\n", + "----------------------------------------\n", + "Model: model_b_sileod-deberta-v3-base-tasksource-nli\n", + "Performance Summary:\n", + " superclass accuracy\n", + "0 activity_type 0.7\n", + "1 activities 0.1\n", + "2 climate_or_season 0.6\n", + "3 style_or_comfort 0.4\n", + "4 dress_code 0.6\n", + "5 accommodation 0.9\n", + "6 transportation 0.7\n", + "7 special_conditions 0.1\n", + "8 trip_length_days 0.5\n", + "----------------------------------------\n" + ] + } + ], "source": [ - "# load all model results later for comparison\n", - "with open(f'{model_name}_results.pkl', 'rb') as f:\n", - " model_result = pickle.load(f)\n" + "# Folder where your .pkl files are saved\n", + "results_dir = 'results'\n", + "\n", + "# Dictionary to store all loaded results\n", + "all_results = {}\n", + "\n", + "# Loop through all .pkl files in the folder\n", + "for filename in os.listdir(results_dir):\n", + " if filename.endswith('.pkl'):\n", + " model_name = filename.replace('_results.pkl', '') # Extract model name\n", + " file_path = os.path.join(results_dir, filename)\n", + " \n", + " # Load the result\n", + " with open(file_path, 'rb') as f:\n", + " result = pickle.load(f)\n", + " all_results[model_name] = result\n", + "\n", + "# Now you can compare performance across models\n", + "for model, data in all_results.items():\n", + " print(f\"Model: {model}\")\n", + " print(f\"Performance Summary:\\n{data['perf_summary']}\")\n", + " print(\"-\" * 40)\n", + "\n", + "\n", + "# Now you can compare performance across models\n", + "for model, data in 
all_results.items():\n", + " print(f\"Model: {model}\")\n", + " print(f\"Performance Summary:\\n{data['perf_superclass']}\")\n", + " print(\"-\" * 40)" ] }, {
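The loading-and-comparison cell above prints each model's tables one after another. For a side-by-side comparison it can be more convenient to collect the loaded results into two tables, one for the overall summary and one for the per-superclass accuracies. The following is a minimal sketch, not part of the notebook, and it assumes the pickled dicts carry the 'perf_summary' Series and the 'perf_superclass' DataFrame (with 'superclass' and 'accuracy' columns) created in the save cell.

import os
import pickle
import pandas as pd

results_dir = 'results'  # assumed folder containing the *_results.pkl files

# Load every pickled result dict, keyed by model name
all_results = {}
for filename in os.listdir(results_dir):
    if filename.endswith('.pkl'):
        model_name = filename.replace('_results.pkl', '')
        with open(os.path.join(results_dir, filename), 'rb') as f:
            all_results[model_name] = pickle.load(f)

# One row per model: accuracy, true_ident, false_pred side by side
summary_df = pd.DataFrame(
    {model: data['perf_summary'] for model, data in all_results.items()}
).T
print(summary_df)

# Per-superclass accuracy, one column per model
superclass_df = pd.concat(
    {
        model: data['perf_superclass'].set_index('superclass')['accuracy']
        for model, data in all_results.items()
    },
    axis=1,
)
print(superclass_df)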