class SpanClassifierWithStrictF1:
    """Token-classification (BIO) span tagger with strict-F1 evaluation.

    Wraps a HuggingFace tokenizer (and, once attached, a token-classification
    model) for the GermEval "flausch" span task: 10 span types, each with a
    B-/I- tag, plus "O" — 21 labels in total.

    NOTE(review): `self.model` is not created here; the notebook attaches it
    externally before `predict`/`evaluate_strict_f1` are called.
    """

    def __init__(self, model_name="deepset/gbert-base"):
        # Only the tokenizer is instantiated here; the model is loaded in a
        # separate notebook cell and assigned to `self.model`.
        self.model_name = model_name
        self.tokenizer = AutoTokenizer.from_pretrained(model_name)

        # "O" first so that label id 0 means "outside any span".
        self.labels =[
            "O",
            "B-positive feedback", "B-compliment", "B-affection declaration", "B-encouragement", "B-gratitude", "B-agreement", "B-ambiguous", "B-implicit", "B-group membership", "B-sympathy",
            "I-positive feedback", "I-compliment", "I-affection declaration", "I-encouragement", "I-gratitude", "I-agreement", "I-ambiguous", "I-implicit", "I-group membership", "I-sympathy"
        ]
        # Bidirectional label <-> id maps used for training and decoding.
        self.label2id = {label: i for i, label in enumerate(self.labels)}
        self.id2label = {i: label for i, label in enumerate(self.labels)}

    def create_dataset(self, comments_df, spans_df):
        """Build the training dataset with BIO labels and keep evaluation data.

        Returns a pair `(examples, eval_data)`: `examples` holds tokenized
        inputs with per-token label ids; `eval_data` keeps, per comment, the
        raw text, offset mapping and gold spans needed for strict-F1 scoring.
        """
        examples = []
        eval_data = []  # side data for the strict-F1 computation

        spans_grouped = spans_df.groupby(['document', 'comment_id'])

        for _, row in comments_df.iterrows():
            text = row['comment']
            document = row['document']
            comment_id = row['comment_id']
            key = (document, comment_id)

            # Gold spans for this comment as (type, start, end) tuples.
            if key in spans_grouped.groups:
                true_spans = [(span_type, int(start), int(end))
                              for span_type, start, end in
                              spans_grouped.get_group(key)[['type', 'start', 'end']].values]
            else:
                true_spans = []

            # Tokenize with character offsets so BIO labels can be aligned
            # back to span character positions.
            tokenized = self.tokenizer(text, truncation=True, max_length=512,
                                       return_offsets_mapping=True)

            # Per-token BIO label ids (note: column order here is
            # (start, end, type), unlike the tuples above).
            labels = self._create_bio_labels(tokenized['offset_mapping'],
                                             spans_grouped.get_group(key)[['start', 'end', 'type']].values
                                             if key in spans_grouped.groups else [])

            examples.append({
                'input_ids': tokenized['input_ids'],
                'attention_mask': tokenized['attention_mask'],
                'labels': labels
            })

            # Keep evaluation data aligned 1:1 with `examples`.
            eval_data.append({
                'text': text,
                'offset_mapping': tokenized['offset_mapping'],
                'true_spans': true_spans,
                'document': document,
                'comment_id': comment_id
            })

        return examples, eval_data

    def _create_bio_labels(self, offset_mapping, spans):
        """Create per-token BIO label ids for one comment.

        `spans` rows are (start, end, type); any token whose character range
        overlaps a span gets that span's B- or I- label.
        """
        labels = [0] * len(offset_mapping)  # 0 = "O"

        for start, end, type_label in spans:
            for i, (token_start, token_end) in enumerate(offset_mapping):
                # NOTE(review): fast tokenizers usually emit (0, 0) rather
                # than None for special tokens — confirm this guard ever fires.
                if token_start is None:  # special tokens
                    continue

                # Token overlaps the span (half-open interval test).
                if token_start < end and token_end > start:
                    if token_start <= start:
                        labels[i] = self.label2id[f'B-{type_label}']  # e.g. B-compliment
                    else:
                        labels[i] = self.label2id[f'I-{type_label}']  # e.g. I-compliment

        return labels

    def compute_metrics(self, eval_pred):
        """Compute strict F1 for the HF Trainer.

        NOTE(review): relies on `self.current_eval_data` being assigned
        externally (by the training loop) before evaluation — it is never set
        inside this class; confirm the caller provides it in example order.
        """
        predictions, labels = eval_pred
        predictions = np.argmax(predictions, axis=2)

        # Convert token predictions to character spans per example.
        batch_pred_spans = []
        batch_true_spans = []

        for i, (pred_seq, label_seq) in enumerate(zip(predictions, labels)):
            # Evaluation side-data for this example.
            if i < len(self.current_eval_data):
                eval_item = self.current_eval_data[i]
                text = eval_item['text']
                offset_mapping = eval_item['offset_mapping']
                true_spans = eval_item['true_spans']

                # Keep only real tokens (drop padding / ignored positions).
                valid_predictions = []
                valid_offsets = []

                for j, (pred_label, true_label) in enumerate(zip(pred_seq, label_seq)):
                    if true_label != -100 and j < len(offset_mapping):
                        valid_predictions.append(pred_label)
                        valid_offsets.append(offset_mapping[j])

                # Decode BIO ids into spans.
                pred_spans = self._predictions_to_spans(valid_predictions, valid_offsets, text)
                pred_spans_tuples = [(span['type'], span['start'], span['end']) for span in pred_spans]

                batch_pred_spans.append(pred_spans_tuples)
                batch_true_spans.append(true_spans)

        # Aggregate strict F1 over the whole eval batch.
        strict_f1, strict_precision, strict_recall, tp, fp, fn = self._calculate_strict_f1(
            batch_true_spans, batch_pred_spans
        )

        torch.cuda.memory.empty_cache()

        # NOTE(review): metrics are wrapped in tensors; HF Trainer normally
        # expects plain floats here — confirm downstream logging handles this.
        return {
            "strict_f1": torch.tensor(strict_f1),
            "strict_precision": torch.tensor(strict_precision),
            "strict_recall": torch.tensor(strict_recall),
            "true_positives": torch.tensor(tp),
            "false_positives": torch.tensor(fp),
            "false_negatives": torch.tensor(fn)
        }

    def _calculate_strict_f1(self, true_spans_list, pred_spans_list):
        """Compute strict F1 over all comments.

        "Strict" means a predicted span counts as correct only if type, start
        and end all match a gold span exactly.
        """
        tp, fp, fn = 0, 0, 0

        for true_spans, pred_spans in zip(true_spans_list, pred_spans_list):
            # Exact matches (type and boundaries must agree).
            matches = self._find_exact_matches(true_spans, pred_spans)

            tp += len(matches)
            fp += len(pred_spans) - len(matches)
            fn += len(true_spans) - len(matches)

        # Guard against zero denominators.
        precision = tp / (tp + fp) if (tp + fp) > 0 else 0.0
        recall = tp / (tp + fn) if (tp + fn) > 0 else 0.0
        f1 = 2 * precision * recall / (precision + recall) if (precision + recall) > 0 else 0.0

        return f1, precision, recall, tp, fp, fn

    def _find_exact_matches(self, true_spans, pred_spans):
        """Greedy 1:1 matching of gold and predicted spans (exact equality).

        Each predicted span can be consumed by at most one gold span.
        """
        matches = []
        used_pred = set()

        for true_span in true_spans:
            for i, pred_span in enumerate(pred_spans):
                if i not in used_pred and true_span == pred_span:
                    matches.append((true_span, pred_span))
                    used_pred.add(i)
                    break

        return matches

    def _predictions_to_spans(self, predicted_labels, offset_mapping, text):
        """Decode a per-token label-id sequence into character-level spans.

        B- starts a new span, I- (of any type) extends the current one, O (or
        any other label) closes it.
        """
        spans = []
        current_span = None

        for i, label_id in enumerate(predicted_labels):
            if i >= len(offset_mapping):
                break

            label = self.id2label[label_id]
            token_start, token_end = offset_mapping[i]

            # NOTE(review): as above, special tokens are usually (0, 0), not
            # None — confirm this guard is effective for this tokenizer.
            if token_start is None:
                continue

            if label.startswith('B-'):
                if current_span:
                    spans.append(current_span)
                current_span = {
                    'type': label[2:],
                    'start': token_start,
                    'end': token_end,
                    'text': text[token_start:token_end]
                }
            elif label.startswith('I-') and current_span:
                # Extend the open span; the I- type is not checked against the
                # B- type here (decoding is deliberately lenient).
                current_span['end'] = token_end
                current_span['text'] = text[current_span['start']:current_span['end']]
            else:
                if current_span:
                    spans.append(current_span)
                current_span = None

        # Flush a span still open at the end of the sequence.
        if current_span:
            spans.append(current_span)

        return spans

    def predict(self, texts):
        """Predict spans for a list of raw texts.

        Returns one dict per text: {'text': ..., 'spans': [...]} with spans as
        produced by `_predictions_to_spans`.
        """
        if not hasattr(self, 'model'):
            raise ValueError("Modell muss erst trainiert werden!")

        predictions = []
        device = next(self.model.parameters()).device

        for text in texts:
            # Tokenize with offsets; texts longer than 512 tokens are truncated.
            inputs = self.tokenizer(text, return_tensors="pt", truncation=True,
                                    max_length=512, return_offsets_mapping=True)

            # The model must not see offset_mapping; keep it for decoding.
            offset_mapping = inputs.pop('offset_mapping')
            inputs = {k: v.to(device) for k, v in inputs.items()}

            # Inference without gradients.
            with torch.no_grad():
                outputs = self.model(**inputs)

            predicted_labels = torch.argmax(outputs.logits, dim=2)[0].cpu().numpy()

            # Extract character-level spans.
            spans = self._predictions_to_spans(predicted_labels, offset_mapping[0], text)
            predictions.append({'text': text, 'spans': spans})

        return predictions

    def evaluate_strict_f1(self, comments_df, spans_df):
        """Evaluate strict F1 on held-out data and print a summary.

        Returns a dict with strict precision/recall/F1 and the TP/FP/FN counts.
        """
        if not hasattr(self, 'model'):
            raise ValueError("Modell muss erst trainiert werden!")

        print("Evaluiere Strict F1...")

        # Predict spans for every comment.
        texts = comments_df['comment'].tolist()
        predictions = self.predict(texts)

        # Organize gold spans by (document, comment_id).
        spans_grouped = spans_df.groupby(['document', 'comment_id'])
        true_spans_dict = {}
        pred_spans_dict = {}

        for i, (_, row) in enumerate(comments_df.iterrows()):
            key = (row['document'], row['comment_id'])

            # Gold spans.
            if key in spans_grouped.groups:
                true_spans = [(span_type, int(start), int(end))
                              for span_type, start, end in
                              spans_grouped.get_group(key)[['type', 'start', 'end']].values]
            else:
                true_spans = []

            # Predicted spans (relies on `predictions` being in row order).
            pred_spans = [(span['type'], span['start'], span['end'])
                          for span in predictions[i]['spans']]

            true_spans_dict[key] = true_spans
            pred_spans_dict[key] = pred_spans

        # Compute strict F1 over all comments.
        all_true_spans = list(true_spans_dict.values())
        all_pred_spans = list(pred_spans_dict.values())

        f1, precision, recall, tp, fp, fn = self._calculate_strict_f1(all_true_spans, all_pred_spans)

        print(f"\nStrict F1 Ergebnisse:")
        print(f"Precision: {precision:.4f}")
        print(f"Recall: {recall:.4f}")
        print(f"F1-Score: {f1:.4f}")
        print(f"True Positives: {tp}, False Positives: {fp}, False Negatives: {fn}")

        return {
            'strict_f1': f1,
            'strict_precision': precision,
            'strict_recall': recall,
            'true_positives': tp,
            'false_positives': fp,
            'false_negatives': fn
        }
# Load GermEval 2025 training data, merge task-1 labels onto the comments,
# and keep a grouped view of the task-2 spans per (document, comment).
comments: pd.DataFrame = pd.read_csv("./share-GermEval2025-data/Data/training data/comments.csv")
task1: pd.DataFrame = pd.read_csv("./share-GermEval2025-data/Data/training data/task1.csv")
task2: pd.DataFrame = pd.read_csv("./share-GermEval2025-data/Data/training data/task2.csv")
comments = comments.merge(task1, on=["document", "comment_id"])
spans_grouped = task2.groupby(['document', 'comment_id'])

test_data: pd.DataFrame = pd.read_csv("./share-GermEval2025-data/Data/test data/comments.csv")

# Flag every comment whose annotated spans overlap one another, then drop all
# spans belonging to such comments (the BIO tagging scheme used downstream
# cannot represent overlapping spans).
task2['overlap'] = False
overlapping_spans = task2.groupby(['document', 'comment_id'])
for (doc, comment), group in overlapping_spans:
    if len(group) <= 1:
        continue
    bounds = list(zip(group['start'].tolist(), group['end'].tolist()))
    # Two spans overlap unless one ends at or before the start of the other.
    has_overlap = any(
        not (end_a <= start_b or end_b <= start_a)
        for k, (start_a, end_a) in enumerate(bounds)
        for (start_b, end_b) in bounds[k + 1:]
    )
    if has_overlap:
        mask = (task2['document'] == doc) & (task2['comment_id'] == comment)
        task2.loc[mask, 'overlap'] = True

task2 = task2[task2['overlap'] == False].drop(columns=['overlap'])
"code", + "source": [ + "from multiset import *\n", + "ALL_LABELS = [\"affection declaration\",\"agreement\",\"ambiguous\",\n", + " \"compliment\",\"encouragement\",\"gratitude\",\"group membership\",\n", + " \"implicit\",\"positive feedback\",\"sympathy\"]\n", + "\n", + "def fine_grained_flausch_by_label(gold, predicted):\n", + " gold['cid']= gold['document']+\"_\"+gold['comment_id'].apply(str)\n", + " predicted['cid']= predicted['document']+\"_\"+predicted['comment_id'].apply(str)\n", + "\n", + " # annotation sets (predicted)\n", + " pred_spans = Multiset()\n", + " pred_spans_loose = Multiset()\n", + " pred_types = Multiset()\n", + "\n", + " # annotation sets (gold)\n", + " gold_spans = Multiset()\n", + " gold_spans_loose = Multiset()\n", + " gold_types = Multiset()\n", + "\n", + " for row in predicted.itertuples(index=False):\n", + " pred_spans.add((row.cid,row.type,row.start,row.end))\n", + " pred_spans_loose.add((row.cid,row.start,row.end))\n", + " pred_types.add((row.cid,row.type))\n", + " for row in gold.itertuples(index=False):\n", + " gold_spans.add((row.cid,row.type,row.start,row.end))\n", + " gold_spans_loose.add((row.cid,row.start,row.end))\n", + " gold_types.add((row.cid,row.type))\n", + "\n", + " # precision = true_pos / true_pos + false_pos\n", + " # recall = true_pos / true_pos + false_neg\n", + " # f_1 = 2 * prec * rec / (prec + rec)\n", + "\n", + " results = {'TOTAL': {'STRICT': {},'SPANS': {},'TYPES': {}}}\n", + " # label-wise evaluation (only for strict and type)\n", + " for label in ALL_LABELS:\n", + " results[label] = {'STRICT': {},'TYPES': {}}\n", + " gold_spans_x = set(filter(lambda x: x[1].__eq__(label), gold_spans))\n", + " pred_spans_x = set(filter(lambda x: x[1].__eq__(label), pred_spans))\n", + " gold_types_x = set(filter(lambda x: x[1].__eq__(label), gold_types))\n", + " pred_types_x = set(filter(lambda x: x[1].__eq__(label), pred_types))\n", + "\n", + " # strict: spans + type must match\n", + " ### NOTE: x and y / x returns 0 if x = 0 
and y/x otherwise (test for zero division)\n", + " strict_p = float(len(pred_spans_x)) and float( len(gold_spans_x.intersection(pred_spans_x))) / len(pred_spans_x)\n", + " strict_r = float(len(gold_spans_x)) and float( len(gold_spans_x.intersection(pred_spans_x))) / len(gold_spans_x)\n", + " strict_f = (strict_p + strict_r) and 2 * strict_p * strict_r / (strict_p + strict_r)\n", + " results[label]['STRICT']['prec'] = strict_p\n", + " results[label]['STRICT']['rec'] = strict_r\n", + " results[label]['STRICT']['f1'] = strict_f\n", + "\n", + " # detection mode: only types must match (per post)\n", + " types_p = float(len(pred_types_x)) and float( len(gold_types_x.intersection(pred_types_x))) / len(pred_types_x)\n", + " types_r = float(len(gold_types_x)) and float( len(gold_types_x.intersection(pred_types_x))) / len(gold_types_x)\n", + " types_f = (types_p + types_r) and 2 * types_p * types_r / (types_p + types_r)\n", + " results[label]['TYPES']['prec'] = types_p\n", + " results[label]['TYPES']['rec'] = types_r\n", + " results[label]['TYPES']['f1'] = types_f\n", + "\n", + " # Overall evaluation\n", + " # strict: spans + type must match\n", + " strict_p = float(len(pred_spans)) and float( len(gold_spans.intersection(pred_spans))) / len(pred_spans)\n", + " strict_r = float(len(gold_spans)) and float( len(gold_spans.intersection(pred_spans))) / len(gold_spans)\n", + " strict_f = (strict_p + strict_r) and 2 * strict_p * strict_r / (strict_p + strict_r)\n", + " results['TOTAL']['STRICT']['prec'] = strict_p\n", + " results['TOTAL']['STRICT']['rec'] = strict_r\n", + " results['TOTAL']['STRICT']['f1'] = strict_f\n", + "\n", + " # spans: spans must match\n", + " spans_p = float(len(pred_spans_loose)) and float( len(gold_spans_loose.intersection(pred_spans_loose))) / len(pred_spans_loose)\n", + " spans_r = float(len(gold_spans_loose)) and float( len(gold_spans_loose.intersection(pred_spans_loose))) / len(gold_spans_loose)\n", + " spans_f = (spans_p + spans_r) and 2 * spans_p * 
spans_r / (spans_p + spans_r)\n", + " results['TOTAL']['SPANS']['prec'] = spans_p\n", + " results['TOTAL']['SPANS']['rec'] = spans_r\n", + " results['TOTAL']['SPANS']['f1'] = spans_f\n", + "\n", + " # detection mode: only types must match (per post)\n", + " types_p = float(len(pred_types)) and float( len(gold_types.intersection(pred_types))) / len(pred_types)\n", + " types_r = float(len(gold_types)) and float( len(gold_types.intersection(pred_types))) / len(gold_types)\n", + " types_f = (types_p + types_r) and 2 * types_p * types_r / (types_p + types_r)\n", + " results['TOTAL']['TYPES']['prec'] = types_p\n", + " results['TOTAL']['TYPES']['rec'] = types_r\n", + " results['TOTAL']['TYPES']['f1'] = types_f\n", + "\n", + "# print(\"STRICT:\\n \",strict_p,strict_r,strict_f)\n", + "# print(\"SPANS:\\n \",spans_p,spans_r,spans_f)\n", + "# print(\"TYPES:\\n \",types_p,types_r,types_f)\n", + " return(results)" + ], + "id": "c8d1b091bbe9684c", + "outputs": [], + "execution_count": 6 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T13:53:20.379955Z", + "start_time": "2025-06-27T13:53:19.294053Z" + } + }, + "cell_type": "code", + "source": "classifier = SpanClassifierWithStrictF1('xlm-roberta-large')", + "id": "da0c571795f194fe", + "outputs": [], + "execution_count": 7 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T13:53:30.242627Z", + "start_time": "2025-06-27T13:53:22.916172Z" + } + }, + "cell_type": "code", + "source": [ + "# Dataset neu erstellen für diesen Fold\n", + "examples, eval_data = classifier.create_dataset(comments, task2)\n", + "train_examples, val_examples = train_test_split(examples, test_size=0.1, random_state=42)\n", + "\n", + "# Evaluation-Daten entsprechend aufteilen\n", + "train_indices, val_indices = train_test_split(range(len(examples)), test_size=0.1, random_state=42)" + ], + "id": "e66b73a1f8c2f5d5", + "outputs": [], + "execution_count": 8 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": 
# Instantiate an XLM-R-large token-classification head with the 21 BIO labels,
# then restore the fine-tuned weights from the exp027-2 checkpoint.
classifier.model = AutoModelForTokenClassification.from_pretrained(
    'xlm-roberta-large',
    num_labels=len(classifier.labels),
    id2label=classifier.id2label,
    label2id=classifier.label2id
)
# NOTE(review): torch.load without weights_only=True unpickles arbitrary
# objects — safe only because this checkpoint was produced locally.
classifier.model.load_state_dict(torch.load('./experiments/exp027/exp027-2_retraining_final_model.pth'))
# Switch to inference mode (disables dropout).
classifier.model.eval()
# Run the fine-tuned model over every validation comment and store, per row:
# tokens, offset mapping, predicted label ids, softmax probabilities and the
# gold spans — so several decoding strategies can be compared afterwards.
test_comments = comments_val.copy()

comments_val['gold_spans'] = None
comments_val['predicted_labels'] = None
comments_val['predicted_probs'] = None
comments_val['offset_mapping'] = None
comments_val['text_tokens'] = None

for idx in range(len(comments_val)): #range(15):
    row = comments_val.iloc[idx]
    text = row['comment']
    key = (row['document'], row['comment_id'])

    # Raw subword tokens (without special tokens) for later inspection.
    text_tokens = classifier.tokenizer.tokenize(text)
    comments_val.at[idx, 'text_tokens'] = text_tokens

    device = next(classifier.model.parameters()).device
    inputs = classifier.tokenizer(text, return_tensors="pt", truncation=True, max_length=512, return_offsets_mapping=True)

    # The model must not receive offset_mapping; keep it for span decoding.
    offset_mapping = inputs.pop('offset_mapping')
    comments_val.at[idx, 'offset_mapping'] = offset_mapping.cpu().numpy()[0].tolist()
    inputs = {k: v.to(device) for k, v in inputs.items()}

    # Inference without gradients.
    with torch.no_grad():
        outputs = classifier.model(**inputs)

    predicted_labels = torch.argmax(outputs.logits, dim=2)[0].cpu().numpy()
    predicted_probs = torch.nn.functional.softmax(outputs.logits, dim=2)[0].cpu().numpy()
    comments_val.at[idx, 'predicted_labels'] = predicted_labels
    comments_val.at[idx, 'predicted_probs'] = predicted_probs

    # NOTE(review): spans_grouped was built from task2 BEFORE the overlap
    # filter was applied — confirm overlapping gold spans are intended here.
    if key not in spans_grouped.groups:
        comments_val.at[idx, 'gold_spans'] = []
        pass  # NOTE(review): redundant statement, kept as-is
    else:
        spans = spans_grouped.get_group(key).to_dict(orient='records')
        comments_val.at[idx, 'gold_spans'] = spans

# or simply predict like this witout probabilities:
# val_set_predictions = classifier.predict(comments_val.comment.tolist())
Running this sequence through the model will result in indexing errors\n" + ] + } + ], + "execution_count": 17 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T14:13:01.739630Z", + "start_time": "2025-06-27T14:13:01.630996Z" + } + }, + "cell_type": "code", + "source": [ + "comments_val['predicted_spans'] = comments_val.apply(pred_to_spans, axis=1, result_type='expand')\n", + "\n", + "test_gold_spans = pd.DataFrame((comments_val['gold_spans'].explode().dropna().tolist()))\n", + "test_baseline_spans = pd.DataFrame(comments_val.apply(convert_spans, axis=1).explode().dropna().tolist())\n", + "print(f\"F1 on ES data before postprocessing {fine_grained_flausch_by_label(test_gold_spans, test_baseline_spans)['TOTAL']['STRICT']}\")" + ], + "id": "6fdc40f482ad548a", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "F1 on ES data before postprocessing {'prec': 0.7364043506078055, 'rec': 0.7587343441001978, 'f1': 0.7474025974025974}\n" + ] + } + ], + "execution_count": 36 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T13:58:37.434614Z", + "start_time": "2025-06-27T13:58:37.267648Z" + } + }, + "cell_type": "code", + "source": [ + "def build_spans_from_classification(tokens, classification, offset_mapping):\n", + " \"\"\"Modified version to work with string tokens and offset mapping\"\"\"\n", + " res = []\n", + " searching_end = False\n", + " temp_res = []\n", + " trunc_count = 0\n", + " skip_count = 0\n", + "\n", + " for i, el in enumerate(classification):\n", + " # Skip special tokens like CLS, SEP\n", + " if i >= len(offset_mapping) or offset_mapping[i][0] is None:\n", + " continue\n", + "\n", + " token_start, token_end = offset_mapping[i]\n", + "\n", + " if el == 'O' and searching_end is True:\n", + " if i != 0 and i != len(tokens) - 1 and tokens[i].startswith('##'):\n", + " continue\n", + " if i > 0:\n", + " prev_end = offset_mapping[i-1][1]\n", + " temp_res[1] = prev_end\n", + " else:\n", + " 
temp_res[1] = -1\n", + " res.append(temp_res)\n", + " searching_end = False\n", + "\n", + " elif el.startswith('B-'):\n", + " if i != 0 and i != len(tokens) - 1 and tokens[i].startswith('##'):\n", + " continue\n", + " if searching_end is True:\n", + " if i > 0:\n", + " prev_end = offset_mapping[i-1][1]\n", + " temp_res[1] = prev_end\n", + " else:\n", + " temp_res[1] = -1\n", + " res.append(temp_res)\n", + " trunc_count += 1\n", + " split = el.split('-', 1)\n", + " label_type = split[1]\n", + " temp_res = [token_start, -1, label_type, \"\"] # Changed structure\n", + " searching_end = True\n", + "\n", + " elif el.startswith('I-'):\n", + " if searching_end is True and i != 0 and i != len(tokens) - 1 and tokens[i].startswith('##'):\n", + " continue\n", + " split = el.split('-', 1)\n", + " label_type = split[1]\n", + " if searching_end is True and label_type != temp_res[2]:\n", + " if i > 0:\n", + " prev_end = offset_mapping[i-1][1]\n", + " temp_res[1] = prev_end\n", + " else:\n", + " temp_res[1] = -1\n", + " res.append(temp_res)\n", + " searching_end = False\n", + " trunc_count += 1\n", + " elif searching_end is False:\n", + " skip_count += 1\n", + "\n", + " if searching_end is True and len(offset_mapping) > 1:\n", + " temp_res[1] = offset_mapping[-1][1]\n", + " res.append(temp_res)\n", + "\n", + " return res, skip_count, trunc_count\n", + "\n", + "def apply_span_classification(row):\n", + " \"\"\"Apply classification to the tokens and return spans.\"\"\"\n", + " tokens = ['[CLS]', *row['text_tokens'], '[SEP]']\n", + " classification = row['predicted_labels']\n", + " offset_mapping = row['offset_mapping']\n", + "\n", + " # Convert classification to BIO format\n", + " bio_labels = [classifier.id2label[label] for label in classification]\n", + "\n", + " print(len(tokens), len(bio_labels), len(offset_mapping))\n", + " spans, skip_count, trunc_count = build_spans_from_classification(tokens, bio_labels, offset_mapping)\n", + "\n", + " document = row['document']\n", + " 
comment_id = row['comment_id']\n", + "\n", + " return [{'document': document, 'comment_id': comment_id, 'type': span[2], 'start': span[0], 'end': span[1]}\n", + " for span in spans if span[0] != -1 and span[1] != -1]\n", + "\n", + "ge2017_rules_test_pred_spans = pd.DataFrame(comments_val.apply(apply_span_classification, axis=1).explode().dropna().tolist())" + ], + "id": "594bfe12f7106015", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "8 8 8\n", + "19 19 19\n", + "13 13 13\n", + "53 53 53\n", + "33 33 33\n", + "28 28 28\n", + "9 9 9\n", + "63 63 63\n", + "18 18 18\n", + "64 64 64\n", + "92 92 92\n", + "3 3 3\n", + "4 4 4\n", + "14 14 14\n", + "15 15 15\n", + "12 12 12\n", + "8 8 8\n", + "14 14 14\n", + "59 59 59\n", + "3 3 3\n", + "7 7 7\n", + "25 25 25\n", + "8 8 8\n", + "9 9 9\n", + "28 28 28\n", + "8 8 8\n", + "14 14 14\n", + "28 28 28\n", + "4 4 4\n", + "44 44 44\n", + "18 18 18\n", + "7 7 7\n", + "11 11 11\n", + "16 16 16\n", + "19 19 19\n", + "4 4 4\n", + "54 54 54\n", + "4 4 4\n", + "17 17 17\n", + "8 8 8\n", + "14 14 14\n", + "6 6 6\n", + "7 7 7\n", + "19 19 19\n", + "15 15 15\n", + "46 46 46\n", + "20 20 20\n", + "29 29 29\n", + "22 22 22\n", + "18 18 18\n", + "18 18 18\n", + "13 13 13\n", + "35 35 35\n", + "9 9 9\n", + "14 14 14\n", + "86 86 86\n", + "7 7 7\n", + "6 6 6\n", + "25 25 25\n", + "7 7 7\n", + "48 48 48\n", + "6 6 6\n", + "69 69 69\n", + "9 9 9\n", + "4 4 4\n", + "7 7 7\n", + "33 33 33\n", + "18 18 18\n", + "48 48 48\n", + "36 36 36\n", + "8 8 8\n", + "5 5 5\n", + "25 25 25\n", + "4 4 4\n", + "14 14 14\n", + "6 6 6\n", + "10 10 10\n", + "16 16 16\n", + "32 32 32\n", + "30 30 30\n", + "10 10 10\n", + "24 24 24\n", + "18 18 18\n", + "12 12 12\n", + "6 6 6\n", + "14 14 14\n", + "140 140 140\n", + "8 8 8\n", + "8 8 8\n", + "8 8 8\n", + "6 6 6\n", + "8 8 8\n", + "10 10 10\n", + "18 18 18\n", + "9 9 9\n", + "5 5 5\n", + "28 28 28\n", + "10 10 10\n", + "9 9 9\n", + "5 5 5\n", + "23 23 23\n", + "4 4 4\n", + "7 7 7\n", 
+ "5 5 5\n", + "31 31 31\n", + "9 9 9\n", + "20 20 20\n", + "4 4 4\n", + "12 12 12\n", + "16 16 16\n", + "8 8 8\n", + "9 9 9\n", + "19 19 19\n", + "21 21 21\n", + "5 5 5\n", + "8 8 8\n", + "4 4 4\n", + "3 3 3\n", + "7 7 7\n", + "11 11 11\n", + "18 18 18\n", + "27 27 27\n", + "6 6 6\n", + "3 3 3\n", + "22 22 22\n", + "17 17 17\n", + "10 10 10\n", + "5 5 5\n", + "63 63 63\n", + "35 35 35\n", + "8 8 8\n", + "23 23 23\n", + "24 24 24\n", + "13 13 13\n", + "3 3 3\n", + "32 32 32\n", + "25 25 25\n", + "3 3 3\n", + "19 19 19\n", + "18 18 18\n", + "115 115 115\n", + "15 15 15\n", + "5 5 5\n", + "103 103 103\n", + "7 7 7\n", + "13 13 13\n", + "9 9 9\n", + "19 19 19\n", + "7 7 7\n", + "16 16 16\n", + "9 9 9\n", + "4 4 4\n", + "18 18 18\n", + "4 4 4\n", + "7 7 7\n", + "17 17 17\n", + "15 15 15\n", + "6 6 6\n", + "9 9 9\n", + "45 45 45\n", + "48 48 48\n", + "4 4 4\n", + "20 20 20\n", + "9 9 9\n", + "12 12 12\n", + "66 66 66\n", + "24 24 24\n", + "4 4 4\n", + "6 6 6\n", + "9 9 9\n", + "60 60 60\n", + "35 35 35\n", + "12 12 12\n", + "4 4 4\n", + "8 8 8\n", + "9 9 9\n", + "37 37 37\n", + "26 26 26\n", + "18 18 18\n", + "4 4 4\n", + "66 66 66\n", + "14 14 14\n", + "9 9 9\n", + "7 7 7\n", + "9 9 9\n", + "7 7 7\n", + "19 19 19\n", + "11 11 11\n", + "6 6 6\n", + "8 8 8\n", + "13 13 13\n", + "7 7 7\n", + "4 4 4\n", + "51 51 51\n", + "17 17 17\n", + "22 22 22\n", + "11 11 11\n", + "17 17 17\n", + "8 8 8\n", + "9 9 9\n", + "8 8 8\n", + "7 7 7\n", + "9 9 9\n", + "3 3 3\n", + "3 3 3\n", + "7 7 7\n", + "7 7 7\n", + "36 36 36\n", + "13 13 13\n", + "22 22 22\n", + "5 5 5\n", + "6 6 6\n", + "6 6 6\n", + "19 19 19\n", + "145 145 145\n", + "5 5 5\n", + "11 11 11\n", + "24 24 24\n", + "16 16 16\n", + "45 45 45\n", + "14 14 14\n", + "15 15 15\n", + "12 12 12\n", + "13 13 13\n", + "48 48 48\n", + "26 26 26\n", + "13 13 13\n", + "4 4 4\n", + "6 6 6\n", + "6 6 6\n", + "61 61 61\n", + "17 17 17\n", + "19 19 19\n", + "25 25 25\n", + "39 39 39\n", + "4 4 4\n", + "9 9 9\n", + "19 19 19\n", + "11 11 
11\n", + "7 7 7\n", + "14 14 14\n", + "8 8 8\n", + "37 37 37\n", + "7 7 7\n", + "31 31 31\n", + "7 7 7\n", + "10 10 10\n", + "10 10 10\n", + "9 9 9\n", + "25 25 25\n", + "80 80 80\n", + "3 3 3\n", + "7 7 7\n", + "22 22 22\n", + "4 4 4\n", + "15 15 15\n", + "3 3 3\n", + "11 11 11\n", + "4 4 4\n", + "5 5 5\n", + "37 37 37\n", + "11 11 11\n", + "4 4 4\n", + "9 9 9\n", + "21 21 21\n", + "16 16 16\n", + "18 18 18\n", + "20 20 20\n", + "11 11 11\n", + "13 13 13\n", + "12 12 12\n", + "7 7 7\n", + "37 37 37\n", + "24 24 24\n", + "22 22 22\n", + "15 15 15\n", + "65 65 65\n", + "20 20 20\n", + "6 6 6\n", + "123 123 123\n", + "11 11 11\n", + "18 18 18\n", + "13 13 13\n", + "23 23 23\n", + "22 22 22\n", + "10 10 10\n", + "18 18 18\n", + "13 13 13\n", + "11 11 11\n", + "3 3 3\n", + "14 14 14\n", + "56 56 56\n", + "12 12 12\n", + "11 11 11\n", + "36 36 36\n", + "8 8 8\n", + "4 4 4\n", + "3 3 3\n", + "6 6 6\n", + "10 10 10\n", + "26 26 26\n", + "4 4 4\n", + "6 6 6\n", + "5 5 5\n", + "5 5 5\n", + "7 7 7\n", + "11 11 11\n", + "26 26 26\n", + "13 13 13\n", + "4 4 4\n", + "6 6 6\n", + "17 17 17\n", + "5 5 5\n", + "9 9 9\n", + "4 4 4\n", + "10 10 10\n", + "9 9 9\n", + "76 76 76\n", + "9 9 9\n", + "55 55 55\n", + "55 55 55\n", + "22 22 22\n", + "10 10 10\n", + "8 8 8\n", + "10 10 10\n", + "14 14 14\n", + "5 5 5\n", + "29 29 29\n", + "43 43 43\n", + "14 14 14\n", + "45 45 45\n", + "11 11 11\n", + "15 15 15\n", + "5 5 5\n", + "10 10 10\n", + "19 19 19\n", + "20 20 20\n", + "8 8 8\n", + "4 4 4\n", + "7 7 7\n", + "5 5 5\n", + "28 28 28\n", + "18 18 18\n", + "23 23 23\n", + "23 23 23\n", + "18 18 18\n", + "9 9 9\n", + "6 6 6\n", + "3 3 3\n", + "13 13 13\n", + "11 11 11\n", + "11 11 11\n", + "12 12 12\n", + "38 38 38\n", + "32 32 32\n", + "61 61 61\n", + "20 20 20\n", + "10 10 10\n", + "3 3 3\n", + "17 17 17\n", + "17 17 17\n", + "9 9 9\n", + "45 45 45\n", + "11 11 11\n", + "7 7 7\n", + "8 8 8\n", + "15 15 15\n", + "19 19 19\n", + "4 4 4\n", + "8 8 8\n", + "20 20 20\n", + "53 53 53\n", + "14 
14 14\n", + "18 18 18\n", + "12 12 12\n", + "16 16 16\n", + "4 4 4\n", + "8 8 8\n", + "4 4 4\n", + "10 10 10\n", + "23 23 23\n", + "16 16 16\n", + "27 27 27\n", + "10 10 10\n", + "8 8 8\n", + "9 9 9\n", + "6 6 6\n", + "50 50 50\n", + "55 55 55\n", + "11 11 11\n", + "8 8 8\n", + "14 14 14\n", + "12 12 12\n", + "12 12 12\n", + "47 47 47\n", + "11 11 11\n", + "4 4 4\n", + "4 4 4\n", + "102 102 102\n", + "5 5 5\n", + "13 13 13\n", + "7 7 7\n", + "6 6 6\n", + "154 154 154\n", + "3 3 3\n", + "4 4 4\n", + "9 9 9\n", + "22 22 22\n", + "14 14 14\n", + "16 16 16\n", + "15 15 15\n", + "4 4 4\n", + "18 18 18\n", + "16 16 16\n", + "30 30 30\n", + "11 11 11\n", + "20 20 20\n", + "27 27 27\n", + "22 22 22\n", + "8 8 8\n", + "22 22 22\n", + "9 9 9\n", + "21 21 21\n", + "4 4 4\n", + "4 4 4\n", + "9 9 9\n", + "4 4 4\n", + "12 12 12\n", + "27 27 27\n", + "9 9 9\n", + "9 9 9\n", + "3 3 3\n", + "16 16 16\n", + "10 10 10\n", + "15 15 15\n", + "24 24 24\n", + "35 35 35\n", + "4 4 4\n", + "66 66 66\n", + "12 12 12\n", + "14 14 14\n", + "8 8 8\n", + "23 23 23\n", + "13 13 13\n", + "17 17 17\n", + "13 13 13\n", + "77 77 77\n", + "31 31 31\n", + "9 9 9\n", + "37 37 37\n", + "3 3 3\n", + "23 23 23\n", + "67 67 67\n", + "17 17 17\n", + "23 23 23\n", + "26 26 26\n", + "19 19 19\n", + "134 134 134\n", + "13 13 13\n", + "13 13 13\n", + "5 5 5\n", + "18 18 18\n", + "6 6 6\n", + "53 53 53\n", + "18 18 18\n", + "124 124 124\n", + "10 10 10\n", + "36 36 36\n", + "9 9 9\n", + "15 15 15\n", + "6 6 6\n", + "10 10 10\n", + "12 12 12\n", + "3 3 3\n", + "22 22 22\n", + "9 9 9\n", + "5 5 5\n", + "26 26 26\n", + "6 6 6\n", + "4 4 4\n", + "8 8 8\n", + "32 32 32\n", + "6 6 6\n", + "22 22 22\n", + "14 14 14\n", + "6 6 6\n", + "17 17 17\n", + "53 53 53\n", + "16 16 16\n", + "8 8 8\n", + "33 33 33\n", + "7 7 7\n", + "13 13 13\n", + "32 32 32\n", + "8 8 8\n", + "18 18 18\n", + "4 4 4\n", + "11 11 11\n", + "4 4 4\n", + "64 64 64\n", + "33 33 33\n", + "102 102 102\n", + "19 19 19\n", + "3 3 3\n", + "20 20 20\n", + 
"31 31 31\n", + "3 3 3\n", + "7 7 7\n", + "9 9 9\n", + "5 5 5\n", + "16 16 16\n", + "33 33 33\n", + "18 18 18\n", + "12 12 12\n", + "35 35 35\n", + "5 5 5\n", + "9 9 9\n", + "12 12 12\n", + "32 32 32\n", + "4 4 4\n", + "6 6 6\n", + "94 94 94\n", + "4 4 4\n", + "14 14 14\n", + "9 9 9\n", + "4 4 4\n", + "11 11 11\n", + "7 7 7\n", + "5 5 5\n", + "8 8 8\n", + "9 9 9\n", + "4 4 4\n", + "12 12 12\n", + "26 26 26\n", + "4 4 4\n", + "7 7 7\n", + "8 8 8\n", + "5 5 5\n", + "11 11 11\n", + "7 7 7\n", + "95 95 95\n", + "12 12 12\n", + "86 86 86\n", + "8 8 8\n", + "8 8 8\n", + "77 77 77\n", + "34 34 34\n", + "133 133 133\n", + "36 36 36\n", + "68 68 68\n", + "7 7 7\n", + "19 19 19\n", + "6 6 6\n", + "4 4 4\n", + "14 14 14\n", + "12 12 12\n", + "20 20 20\n", + "7 7 7\n", + "24 24 24\n", + "7 7 7\n", + "26 26 26\n", + "8 8 8\n", + "32 32 32\n", + "21 21 21\n", + "31 31 31\n", + "8 8 8\n", + "4 4 4\n", + "4 4 4\n", + "16 16 16\n", + "6 6 6\n", + "14 14 14\n", + "28 28 28\n", + "13 13 13\n", + "5 5 5\n", + "21 21 21\n", + "20 20 20\n", + "9 9 9\n", + "19 19 19\n", + "21 21 21\n", + "14 14 14\n", + "17 17 17\n", + "8 8 8\n", + "6 6 6\n", + "14 14 14\n", + "3 3 3\n", + "6 6 6\n", + "79 79 79\n", + "46 46 46\n", + "4 4 4\n", + "8 8 8\n", + "44 44 44\n", + "25 25 25\n", + "32 32 32\n", + "12 12 12\n", + "6 6 6\n", + "28 28 28\n", + "8 8 8\n", + "26 26 26\n", + "30 30 30\n", + "15 15 15\n", + "27 27 27\n", + "27 27 27\n", + "42 42 42\n", + "17 17 17\n", + "3 3 3\n", + "25 25 25\n", + "10 10 10\n", + "8 8 8\n", + "15 15 15\n", + "10 10 10\n", + "17 17 17\n", + "9 9 9\n", + "19 19 19\n", + "12 12 12\n", + "30 30 30\n", + "4 4 4\n", + "4 4 4\n", + "14 14 14\n", + "24 24 24\n", + "4 4 4\n", + "13 13 13\n", + "6 6 6\n", + "14 14 14\n", + "36 36 36\n", + "45 45 45\n", + "13 13 13\n", + "62 62 62\n", + "8 8 8\n", + "44 44 44\n", + "9 9 9\n", + "28 28 28\n", + "11 11 11\n", + "5 5 5\n", + "161 161 161\n", + "17 17 17\n", + "8 8 8\n", + "7 7 7\n", + "6 6 6\n", + "42 42 42\n", + "9 9 9\n", + "16 
16 16\n", + "8 8 8\n", + "20 20 20\n", + "9 9 9\n", + "8 8 8\n", + "18 18 18\n", + "20 20 20\n", + "13 13 13\n", + "30 30 30\n", + "11 11 11\n", + "6 6 6\n", + "9 9 9\n", + "47 47 47\n", + "7 7 7\n", + "4 4 4\n", + "30 30 30\n", + "10 10 10\n", + "4 4 4\n", + "18 18 18\n", + "9 9 9\n", + "12 12 12\n", + "10 10 10\n", + "37 37 37\n", + "12 12 12\n", + "22 22 22\n", + "3 3 3\n", + "42 42 42\n", + "15 15 15\n", + "4 4 4\n", + "49 49 49\n", + "69 69 69\n", + "50 50 50\n", + "5 5 5\n", + "11 11 11\n", + "18 18 18\n", + "22 22 22\n", + "8 8 8\n", + "13 13 13\n", + "18 18 18\n", + "21 21 21\n", + "31 31 31\n", + "9 9 9\n", + "5 5 5\n", + "8 8 8\n", + "34 34 34\n", + "7 7 7\n", + "15 15 15\n", + "9 9 9\n", + "26 26 26\n", + "23 23 23\n", + "91 91 91\n", + "6 6 6\n", + "20 20 20\n", + "61 61 61\n", + "284 284 284\n", + "5 5 5\n", + "4 4 4\n", + "51 51 51\n", + "8 8 8\n", + "9 9 9\n", + "9 9 9\n", + "8 8 8\n", + "12 12 12\n", + "6 6 6\n", + "36 36 36\n", + "20 20 20\n", + "4 4 4\n", + "12 12 12\n", + "27 27 27\n", + "20 20 20\n", + "6 6 6\n", + "9 9 9\n", + "16 16 16\n", + "19 19 19\n", + "4 4 4\n", + "6 6 6\n", + "10 10 10\n", + "34 34 34\n", + "9 9 9\n", + "149 149 149\n", + "26 26 26\n", + "5 5 5\n", + "19 19 19\n", + "4 4 4\n", + "11 11 11\n", + "24 24 24\n", + "22 22 22\n", + "25 25 25\n", + "13 13 13\n", + "13 13 13\n", + "14 14 14\n", + "12 12 12\n", + "4 4 4\n", + "10 10 10\n", + "27 27 27\n", + "6 6 6\n", + "7 7 7\n", + "14 14 14\n", + "9 9 9\n", + "58 58 58\n", + "21 21 21\n", + "13 13 13\n", + "10 10 10\n", + "6 6 6\n", + "14 14 14\n", + "29 29 29\n", + "14 14 14\n", + "51 51 51\n", + "6 6 6\n", + "7 7 7\n", + "38 38 38\n", + "16 16 16\n", + "15 15 15\n", + "15 15 15\n", + "7 7 7\n", + "16 16 16\n", + "4 4 4\n", + "8 8 8\n", + "13 13 13\n", + "8 8 8\n", + "11 11 11\n", + "3 3 3\n", + "42 42 42\n", + "10 10 10\n", + "5 5 5\n", + "21 21 21\n", + "14 14 14\n", + "4 4 4\n", + "53 53 53\n", + "10 10 10\n", + "30 30 30\n", + "7 7 7\n", + "14 14 14\n", + "10 10 10\n", + 
"13 13 13\n", + "13 13 13\n", + "4 4 4\n", + "6 6 6\n", + "27 27 27\n", + "22 22 22\n", + "6 6 6\n", + "10 10 10\n", + "12 12 12\n", + "13 13 13\n", + "11 11 11\n", + "4 4 4\n", + "35 35 35\n", + "29 29 29\n", + "11 11 11\n", + "11 11 11\n", + "24 24 24\n", + "14 14 14\n", + "8 8 8\n", + "14 14 14\n", + "30 30 30\n", + "12 12 12\n", + "17 17 17\n", + "18 18 18\n", + "7 7 7\n", + "16 16 16\n", + "17 17 17\n", + "5 5 5\n", + "14 14 14\n", + "15 15 15\n", + "3 3 3\n", + "15 15 15\n", + "6 6 6\n", + "29 29 29\n", + "14 14 14\n", + "8 8 8\n", + "4 4 4\n", + "26 26 26\n", + "7 7 7\n", + "15 15 15\n", + "7 7 7\n", + "39 39 39\n", + "15 15 15\n", + "11 11 11\n", + "10 10 10\n", + "29 29 29\n", + "7 7 7\n", + "7 7 7\n", + "22 22 22\n", + "17 17 17\n", + "10 10 10\n", + "32 32 32\n", + "9 9 9\n", + "4 4 4\n", + "10 10 10\n", + "11 11 11\n", + "4 4 4\n", + "10 10 10\n", + "8 8 8\n", + "7 7 7\n", + "9 9 9\n", + "15 15 15\n", + "26 26 26\n", + "22 22 22\n", + "6 6 6\n", + "4 4 4\n", + "5 5 5\n", + "4 4 4\n", + "5 5 5\n", + "4 4 4\n", + "15 15 15\n", + "19 19 19\n", + "44 44 44\n", + "29 29 29\n", + "10 10 10\n", + "34 34 34\n", + "20 20 20\n", + "12 12 12\n", + "11 11 11\n", + "8 8 8\n", + "22 22 22\n", + "12 12 12\n", + "12 12 12\n", + "14 14 14\n", + "13 13 13\n", + "14 14 14\n", + "120 120 120\n", + "3 3 3\n", + "8 8 8\n", + "14 14 14\n", + "15 15 15\n", + "9 9 9\n", + "22 22 22\n", + "11 11 11\n", + "29 29 29\n", + "41 41 41\n", + "5 5 5\n", + "10 10 10\n", + "11 11 11\n", + "45 45 45\n", + "14 14 14\n", + "46 46 46\n", + "8 8 8\n", + "12 12 12\n", + "19 19 19\n", + "8 8 8\n", + "60 60 60\n", + "153 153 153\n", + "16 16 16\n", + "4 4 4\n", + "17 17 17\n", + "11 11 11\n", + "10 10 10\n", + "19 19 19\n", + "23 23 23\n", + "5 5 5\n", + "15 15 15\n", + "3 3 3\n", + "4 4 4\n", + "18 18 18\n", + "4 4 4\n", + "38 38 38\n", + "17 17 17\n", + "4 4 4\n", + "17 17 17\n", + "8 8 8\n", + "11 11 11\n", + "21 21 21\n", + "4 4 4\n", + "36 36 36\n", + "60 60 60\n", + "9 9 9\n", + "18 18 
18\n", + "6 6 6\n", + "12 12 12\n", + "11 11 11\n", + "4 4 4\n", + "60 60 60\n", + "70 70 70\n", + "12 12 12\n", + "10 10 10\n", + "12 12 12\n", + "8 8 8\n", + "16 16 16\n", + "6 6 6\n", + "17 17 17\n", + "45 45 45\n", + "6 6 6\n", + "12 12 12\n", + "9 9 9\n", + "6 6 6\n", + "8 8 8\n", + "15 15 15\n", + "19 19 19\n", + "13 13 13\n", + "9 9 9\n", + "21 21 21\n", + "11 11 11\n", + "15 15 15\n", + "23 23 23\n", + "18 18 18\n", + "27 27 27\n", + "12 12 12\n", + "9 9 9\n", + "10 10 10\n", + "10 10 10\n", + "5 5 5\n", + "8 8 8\n", + "27 27 27\n", + "16 16 16\n", + "19 19 19\n", + "29 29 29\n", + "10 10 10\n", + "9 9 9\n", + "6 6 6\n", + "6 6 6\n", + "41 41 41\n", + "19 19 19\n", + "6 6 6\n", + "5 5 5\n", + "10 10 10\n", + "49 49 49\n", + "13 13 13\n", + "16 16 16\n", + "10 10 10\n", + "18 18 18\n", + "6 6 6\n", + "19 19 19\n", + "10 10 10\n", + "4 4 4\n", + "55 55 55\n", + "8 8 8\n", + "12 12 12\n", + "15 15 15\n", + "9 9 9\n", + "16 16 16\n", + "13 13 13\n", + "23 23 23\n", + "21 21 21\n", + "15 15 15\n", + "11 11 11\n", + "19 19 19\n", + "11 11 11\n", + "6 6 6\n", + "12 12 12\n", + "21 21 21\n", + "14 14 14\n", + "17 17 17\n", + "5 5 5\n", + "15 15 15\n", + "23 23 23\n", + "12 12 12\n", + "7 7 7\n", + "12 12 12\n", + "19 19 19\n", + "4 4 4\n", + "7 7 7\n", + "21 21 21\n", + "16 16 16\n", + "13 13 13\n", + "44 44 44\n", + "16 16 16\n", + "29 29 29\n", + "12 12 12\n", + "6 6 6\n", + "10 10 10\n", + "9 9 9\n", + "8 8 8\n", + "29 29 29\n", + "45 45 45\n", + "24 24 24\n", + "4 4 4\n", + "41 41 41\n", + "7 7 7\n", + "5 5 5\n", + "13 13 13\n", + "3 3 3\n", + "6 6 6\n", + "6 6 6\n", + "13 13 13\n", + "40 40 40\n", + "5 5 5\n", + "20 20 20\n", + "4 4 4\n", + "37 37 37\n", + "43 43 43\n", + "9 9 9\n", + "12 12 12\n", + "27 27 27\n", + "24 24 24\n", + "16 16 16\n", + "17 17 17\n", + "59 59 59\n", + "7 7 7\n", + "17 17 17\n", + "37 37 37\n", + "7 7 7\n", + "4 4 4\n", + "4 4 4\n", + "4 4 4\n", + "15 15 15\n", + "9 9 9\n", + "7 7 7\n", + "13 13 13\n", + "29 29 29\n", + "5 5 5\n", + 
"5 5 5\n", + "22 22 22\n", + "27 27 27\n", + "81 81 81\n", + "8 8 8\n", + "12 12 12\n", + "12 12 12\n", + "61 61 61\n", + "19 19 19\n", + "12 12 12\n", + "21 21 21\n", + "6 6 6\n", + "14 14 14\n", + "17 17 17\n", + "9 9 9\n", + "22 22 22\n", + "91 91 91\n", + "11 11 11\n", + "7 7 7\n", + "9 9 9\n", + "26 26 26\n", + "8 8 8\n", + "6 6 6\n", + "17 17 17\n", + "10 10 10\n", + "3 3 3\n", + "11 11 11\n", + "12 12 12\n", + "16 16 16\n", + "12 12 12\n", + "3 3 3\n", + "23 23 23\n", + "30 30 30\n", + "17 17 17\n", + "5 5 5\n", + "11 11 11\n", + "53 53 53\n", + "3 3 3\n", + "23 23 23\n", + "10 10 10\n", + "15 15 15\n", + "32 32 32\n", + "45 45 45\n", + "7 7 7\n", + "36 36 36\n", + "11 11 11\n", + "14 14 14\n", + "8 8 8\n", + "5 5 5\n", + "25 25 25\n", + "5 5 5\n", + "13 13 13\n", + "6 6 6\n", + "11 11 11\n", + "6 6 6\n", + "10 10 10\n", + "6 6 6\n", + "11 11 11\n", + "48 48 48\n", + "8 8 8\n", + "21 21 21\n", + "24 24 24\n", + "8 8 8\n", + "16 16 16\n", + "72 72 72\n", + "7 7 7\n", + "25 25 25\n", + "17 17 17\n", + "5 5 5\n", + "4 4 4\n", + "14 14 14\n", + "8 8 8\n", + "25 25 25\n", + "19 19 19\n", + "7 7 7\n", + "3 3 3\n", + "5 5 5\n", + "14 14 14\n", + "29 29 29\n", + "9 9 9\n", + "7 7 7\n", + "16 16 16\n", + "45 45 45\n", + "4 4 4\n", + "7 7 7\n", + "4 4 4\n", + "32 32 32\n", + "21 21 21\n", + "32 32 32\n", + "9 9 9\n", + "17 17 17\n", + "12 12 12\n", + "12 12 12\n", + "56 56 56\n", + "21 21 21\n", + "29 29 29\n", + "9 9 9\n", + "5 5 5\n", + "4 4 4\n", + "17 17 17\n", + "10 10 10\n", + "7 7 7\n", + "7 7 7\n", + "8 8 8\n", + "26 26 26\n", + "18 18 18\n", + "8 8 8\n", + "3 3 3\n", + "6 6 6\n", + "6 6 6\n", + "18 18 18\n", + "6 6 6\n", + "11 11 11\n", + "13 13 13\n", + "4 4 4\n", + "13 13 13\n", + "12 12 12\n", + "34 34 34\n", + "61 61 61\n", + "38 38 38\n", + "15 15 15\n", + "20 20 20\n", + "5 5 5\n", + "4 4 4\n", + "4 4 4\n", + "12 12 12\n", + "4 4 4\n", + "12 12 12\n", + "7 7 7\n", + "5 5 5\n", + "10 10 10\n", + "4 4 4\n", + "7 7 7\n", + "7 7 7\n", + "20 20 20\n", + "47 
47 47\n", + "30 30 30\n", + "21 21 21\n", + "16 16 16\n", + "13 13 13\n", + "4 4 4\n", + "34 34 34\n", + "36 36 36\n", + "13 13 13\n", + "24 24 24\n", + "12 12 12\n", + "5 5 5\n", + "3 3 3\n", + "10 10 10\n", + "15 15 15\n", + "13 13 13\n", + "34 34 34\n", + "6 6 6\n", + "15 15 15\n", + "91 91 91\n", + "18 18 18\n", + "20 20 20\n", + "46 46 46\n", + "3 3 3\n", + "18 18 18\n", + "8 8 8\n", + "14 14 14\n", + "12 12 12\n", + "14 14 14\n", + "4 4 4\n", + "33 33 33\n", + "10 10 10\n", + "34 34 34\n", + "22 22 22\n", + "4 4 4\n", + "4 4 4\n", + "15 15 15\n", + "11 11 11\n", + "17 17 17\n", + "28 28 28\n", + "31 31 31\n", + "33 33 33\n", + "93 93 93\n", + "5 5 5\n", + "11 11 11\n", + "60 60 60\n", + "11 11 11\n", + "13 13 13\n", + "11 11 11\n", + "8 8 8\n", + "25 25 25\n", + "12 12 12\n", + "16 16 16\n", + "57 57 57\n", + "20 20 20\n", + "125 125 125\n", + "13 13 13\n", + "9 9 9\n", + "20 20 20\n", + "13 13 13\n", + "13 13 13\n", + "4 4 4\n", + "9 9 9\n", + "33 33 33\n", + "4 4 4\n", + "37 37 37\n", + "10 10 10\n", + "14 14 14\n", + "21 21 21\n", + "4 4 4\n", + "115 115 115\n", + "9 9 9\n", + "9 9 9\n", + "11 11 11\n", + "16 16 16\n", + "15 15 15\n", + "7 7 7\n", + "16 16 16\n", + "16 16 16\n", + "6 6 6\n", + "16 16 16\n", + "12 12 12\n", + "8 8 8\n", + "27 27 27\n", + "32 32 32\n", + "16 16 16\n", + "11 11 11\n", + "8 8 8\n", + "14 14 14\n", + "20 20 20\n", + "6 6 6\n", + "11 11 11\n", + "13 13 13\n", + "12 12 12\n", + "21 21 21\n", + "27 27 27\n", + "21 21 21\n", + "28 28 28\n", + "14 14 14\n", + "17 17 17\n", + "33 33 33\n", + "7 7 7\n", + "18 18 18\n", + "35 35 35\n", + "8 8 8\n", + "33 33 33\n", + "8 8 8\n", + "12 12 12\n", + "19 19 19\n", + "32 32 32\n", + "14 14 14\n", + "10 10 10\n", + "17 17 17\n", + "70 70 70\n", + "8 8 8\n", + "35 35 35\n", + "3 3 3\n", + "40 40 40\n", + "5 5 5\n", + "10 10 10\n", + "8 8 8\n", + "8 8 8\n", + "9 9 9\n", + "22 22 22\n", + "6 6 6\n", + "9 9 9\n", + "164 164 164\n", + "15 15 15\n", + "24 24 24\n", + "14 14 14\n", + "73 73 73\n", + 
"17 17 17\n", + "4 4 4\n", + "10 10 10\n", + "3 3 3\n", + "14 14 14\n", + "13 13 13\n", + "43 43 43\n", + "10 10 10\n", + "19 19 19\n", + "19 19 19\n", + "38 38 38\n", + "12 12 12\n", + "5 5 5\n", + "9 9 9\n", + "11 11 11\n", + "10 10 10\n", + "10 10 10\n", + "4 4 4\n", + "15 15 15\n", + "22 22 22\n", + "8 8 8\n", + "15 15 15\n", + "18 18 18\n", + "6 6 6\n", + "4 4 4\n", + "16 16 16\n", + "18 18 18\n", + "6 6 6\n", + "14 14 14\n", + "30 30 30\n", + "18 18 18\n", + "23 23 23\n", + "4 4 4\n", + "17 17 17\n", + "24 24 24\n", + "7 7 7\n", + "32 32 32\n", + "12 12 12\n", + "5 5 5\n", + "21 21 21\n", + "12 12 12\n", + "18 18 18\n", + "24 24 24\n", + "12 12 12\n", + "17 17 17\n", + "14 14 14\n", + "51 51 51\n", + "30 30 30\n", + "23 23 23\n", + "22 22 22\n", + "5 5 5\n", + "9 9 9\n", + "11 11 11\n", + "6 6 6\n", + "13 13 13\n", + "6 6 6\n", + "18 18 18\n", + "9 9 9\n", + "6 6 6\n", + "44 44 44\n", + "18 18 18\n", + "3 3 3\n", + "11 11 11\n", + "11 11 11\n", + "17 17 17\n", + "18 18 18\n", + "15 15 15\n", + "24 24 24\n", + "6 6 6\n", + "40 40 40\n", + "138 138 138\n", + "12 12 12\n", + "12 12 12\n", + "4 4 4\n", + "9 9 9\n", + "9 9 9\n", + "6 6 6\n", + "15 15 15\n", + "9 9 9\n", + "6 6 6\n", + "11 11 11\n", + "78 78 78\n", + "9 9 9\n", + "21 21 21\n", + "12 12 12\n", + "18 18 18\n", + "8 8 8\n", + "14 14 14\n", + "7 7 7\n", + "15 15 15\n", + "18 18 18\n", + "14 14 14\n", + "13 13 13\n", + "16 16 16\n", + "11 11 11\n", + "15 15 15\n", + "13 13 13\n", + "30 30 30\n", + "11 11 11\n", + "37 37 37\n", + "6 6 6\n", + "14 14 14\n", + "6 6 6\n", + "59 59 59\n", + "19 19 19\n", + "32 32 32\n", + "23 23 23\n", + "11 11 11\n", + "9 9 9\n", + "3 3 3\n", + "7 7 7\n", + "12 12 12\n", + "29 29 29\n", + "6 6 6\n", + "22 22 22\n", + "14 14 14\n", + "13 13 13\n", + "11 11 11\n", + "12 12 12\n", + "4 4 4\n", + "28 28 28\n", + "45 45 45\n", + "4 4 4\n", + "5 5 5\n", + "17 17 17\n", + "6 6 6\n", + "9 9 9\n", + "13 13 13\n", + "10 10 10\n", + "9 9 9\n", + "11 11 11\n", + "6 6 6\n", + "4 4 4\n", 
+ "3 3 3\n", + "20 20 20\n", + "13 13 13\n", + "16 16 16\n", + "19 19 19\n", + "29 29 29\n", + "66 66 66\n", + "7 7 7\n", + "12 12 12\n", + "7 7 7\n", + "9 9 9\n", + "24 24 24\n", + "21 21 21\n", + "8 8 8\n", + "23 23 23\n", + "7 7 7\n", + "31 31 31\n", + "17 17 17\n", + "8 8 8\n", + "4 4 4\n", + "4 4 4\n", + "13 13 13\n", + "11 11 11\n", + "8 8 8\n", + "12 12 12\n", + "26 26 26\n", + "25 25 25\n", + "6 6 6\n", + "197 197 197\n", + "9 9 9\n", + "17 17 17\n", + "12 12 12\n", + "6 6 6\n", + "7 7 7\n", + "12 12 12\n", + "13 13 13\n", + "14 14 14\n", + "36 36 36\n", + "70 70 70\n", + "17 17 17\n", + "5 5 5\n", + "15 15 15\n", + "33 33 33\n", + "19 19 19\n", + "5 5 5\n", + "13 13 13\n", + "109 109 109\n", + "3 3 3\n", + "10 10 10\n", + "5 5 5\n", + "18 18 18\n", + "24 24 24\n", + "15 15 15\n", + "23 23 23\n", + "4 4 4\n", + "9 9 9\n", + "6 6 6\n", + "16 16 16\n", + "7 7 7\n", + "20 20 20\n", + "22 22 22\n", + "3 3 3\n", + "30 30 30\n", + "4 4 4\n", + "10 10 10\n", + "5 5 5\n", + "18 18 18\n", + "17 17 17\n", + "5 5 5\n", + "10 10 10\n", + "108 108 108\n", + "6 6 6\n", + "11 11 11\n", + "8 8 8\n", + "23 23 23\n", + "113 113 113\n", + "9 9 9\n", + "22 22 22\n", + "4 4 4\n", + "10 10 10\n", + "22 22 22\n", + "8 8 8\n", + "41 41 41\n", + "8 8 8\n", + "16 16 16\n", + "7 7 7\n", + "5 5 5\n", + "10 10 10\n", + "3 3 3\n", + "6 6 6\n", + "9 9 9\n", + "11 11 11\n", + "4 4 4\n", + "17 17 17\n", + "16 16 16\n", + "8 8 8\n", + "65 65 65\n", + "17 17 17\n", + "19 19 19\n", + "7 7 7\n", + "13 13 13\n", + "5 5 5\n", + "20 20 20\n", + "3 3 3\n", + "7 7 7\n", + "100 100 100\n", + "4 4 4\n", + "45 45 45\n", + "33 33 33\n", + "7 7 7\n", + "11 11 11\n", + "10 10 10\n", + "22 22 22\n", + "4 4 4\n", + "5 5 5\n", + "10 10 10\n", + "46 46 46\n", + "15 15 15\n", + "9 9 9\n", + "5 5 5\n", + "10 10 10\n", + "15 15 15\n", + "9 9 9\n", + "31 31 31\n", + "26 26 26\n", + "20 20 20\n", + "8 8 8\n", + "13 13 13\n", + "9 9 9\n", + "8 8 8\n", + "10 10 10\n", + "6 6 6\n", + "11 11 11\n", + "9 9 9\n", + "4 
4 4\n", + "38 38 38\n", + "72 72 72\n", + "10 10 10\n", + "10 10 10\n", + "13 13 13\n", + "64 64 64\n", + "16 16 16\n", + "6 6 6\n", + "18 18 18\n", + "20 20 20\n", + "6 6 6\n", + "17 17 17\n", + "4 4 4\n", + "4 4 4\n", + "4 4 4\n", + "39 39 39\n", + "6 6 6\n", + "6 6 6\n", + "9 9 9\n", + "8 8 8\n", + "17 17 17\n", + "7 7 7\n", + "21 21 21\n", + "8 8 8\n", + "8 8 8\n", + "10 10 10\n", + "6 6 6\n", + "4 4 4\n", + "25 25 25\n", + "3 3 3\n", + "6 6 6\n", + "4 4 4\n", + "17 17 17\n", + "37 37 37\n", + "16 16 16\n", + "33 33 33\n", + "8 8 8\n", + "4 4 4\n", + "13 13 13\n", + "14 14 14\n", + "8 8 8\n", + "15 15 15\n", + "8 8 8\n", + "8 8 8\n", + "6 6 6\n", + "53 53 53\n", + "20 20 20\n", + "57 57 57\n", + "8 8 8\n", + "34 34 34\n", + "6 6 6\n", + "7 7 7\n", + "4 4 4\n", + "8 8 8\n", + "10 10 10\n", + "13 13 13\n", + "7 7 7\n", + "77 77 77\n", + "20 20 20\n", + "18 18 18\n", + "56 56 56\n", + "3 3 3\n", + "29 29 29\n", + "5 5 5\n", + "24 24 24\n", + "17 17 17\n", + "11 11 11\n", + "4 4 4\n", + "8 8 8\n", + "17 17 17\n", + "16 16 16\n", + "10 10 10\n", + "4 4 4\n", + "22 22 22\n", + "16 16 16\n", + "26 26 26\n", + "17 17 17\n", + "16 16 16\n", + "68 68 68\n", + "9 9 9\n", + "16 16 16\n", + "21 21 21\n", + "8 8 8\n", + "14 14 14\n", + "7 7 7\n", + "3 3 3\n", + "26 26 26\n", + "6 6 6\n", + "14 14 14\n", + "34 34 34\n", + "10 10 10\n", + "26 26 26\n", + "13 13 13\n", + "47 47 47\n", + "21 21 21\n", + "11 11 11\n", + "88 88 88\n", + "17 17 17\n", + "14 14 14\n", + "16 16 16\n", + "20 20 20\n", + "21 21 21\n", + "16 16 16\n", + "5 5 5\n", + "10 10 10\n", + "7 7 7\n", + "21 21 21\n", + "51 51 51\n", + "4 4 4\n", + "52 52 52\n", + "11 11 11\n", + "16 16 16\n", + "16 16 16\n", + "30 30 30\n", + "33 33 33\n", + "19 19 19\n", + "15 15 15\n", + "34 34 34\n", + "23 23 23\n", + "34 34 34\n", + "17 17 17\n", + "71 71 71\n", + "35 35 35\n", + "7 7 7\n", + "8 8 8\n", + "8 8 8\n", + "6 6 6\n", + "34 34 34\n", + "75 75 75\n", + "4 4 4\n", + "8 8 8\n", + "16 16 16\n", + "4 4 4\n", + "18 18 
18\n", + "14 14 14\n", + "74 74 74\n", + "14 14 14\n", + "4 4 4\n", + "5 5 5\n", + "5 5 5\n", + "13 13 13\n", + "11 11 11\n", + "19 19 19\n", + "7 7 7\n", + "12 12 12\n", + "14 14 14\n", + "19 19 19\n", + "11 11 11\n", + "11 11 11\n", + "8 8 8\n", + "6 6 6\n", + "8 8 8\n", + "18 18 18\n", + "22 22 22\n", + "7 7 7\n", + "50 50 50\n", + "15 15 15\n", + "10 10 10\n", + "12 12 12\n", + "8 8 8\n", + "9 9 9\n", + "4 4 4\n", + "23 23 23\n", + "13 13 13\n", + "7 7 7\n", + "10 10 10\n", + "23 23 23\n", + "19 19 19\n", + "28 28 28\n", + "14 14 14\n", + "23 23 23\n", + "20 20 20\n", + "9 9 9\n", + "12 12 12\n", + "11 11 11\n", + "14 14 14\n", + "40 40 40\n", + "73 73 73\n", + "22 22 22\n", + "79 79 79\n", + "6 6 6\n", + "20 20 20\n", + "17 17 17\n", + "52 52 52\n", + "22 22 22\n", + "16 16 16\n", + "5 5 5\n", + "14 14 14\n", + "12 12 12\n", + "15 15 15\n", + "27 27 27\n", + "6 6 6\n", + "11 11 11\n", + "55 55 55\n", + "36 36 36\n", + "10 10 10\n", + "8 8 8\n", + "5 5 5\n", + "8 8 8\n", + "12 12 12\n", + "27 27 27\n", + "9 9 9\n", + "19 19 19\n", + "5 5 5\n", + "8 8 8\n", + "18 18 18\n", + "7 7 7\n", + "11 11 11\n", + "12 12 12\n", + "9 9 9\n", + "20 20 20\n", + "5 5 5\n", + "10 10 10\n", + "97 97 97\n", + "18 18 18\n", + "12 12 12\n", + "11 11 11\n", + "8 8 8\n", + "11 11 11\n", + "8 8 8\n", + "4 4 4\n", + "25 25 25\n", + "8 8 8\n", + "14 14 14\n", + "11 11 11\n", + "29 29 29\n", + "13 13 13\n", + "4 4 4\n", + "4 4 4\n", + "14 14 14\n", + "19 19 19\n", + "6 6 6\n", + "15 15 15\n", + "10 10 10\n", + "23 23 23\n", + "19 19 19\n", + "37 37 37\n", + "44 44 44\n", + "18 18 18\n", + "13 13 13\n", + "14 14 14\n", + "35 35 35\n", + "43 43 43\n", + "6 6 6\n", + "21 21 21\n", + "10 10 10\n", + "7 7 7\n", + "27 27 27\n", + "13 13 13\n", + "3 3 3\n", + "19 19 19\n", + "15 15 15\n", + "18 18 18\n", + "22 22 22\n", + "19 19 19\n", + "5 5 5\n", + "28 28 28\n", + "117 117 117\n", + "29 29 29\n", + "60 60 60\n", + "11 11 11\n", + "7 7 7\n", + "53 53 53\n", + "32 32 32\n", + "43 43 43\n", + "8 
8 8\n", + "12 12 12\n", + "14 14 14\n", + "24 24 24\n", + "32 32 32\n", + "20 20 20\n", + "11 11 11\n", + "10 10 10\n", + "41 41 41\n", + "8 8 8\n", + "50 50 50\n", + "14 14 14\n", + "15 15 15\n", + "25 25 25\n", + "6 6 6\n", + "6 6 6\n", + "14 14 14\n", + "37 37 37\n", + "13 13 13\n", + "7 7 7\n", + "8 8 8\n", + "20 20 20\n", + "16 16 16\n", + "17 17 17\n", + "14 14 14\n", + "15 15 15\n", + "4 4 4\n", + "16 16 16\n", + "11 11 11\n", + "9 9 9\n", + "7 7 7\n", + "47 47 47\n", + "8 8 8\n", + "16 16 16\n", + "8 8 8\n", + "3 3 3\n", + "5 5 5\n", + "24 24 24\n", + "73 73 73\n", + "17 17 17\n", + "5 5 5\n", + "9 9 9\n", + "46 46 46\n", + "65 65 65\n", + "8 8 8\n", + "23 23 23\n", + "25 25 25\n", + "13 13 13\n", + "32 32 32\n", + "4 4 4\n", + "14 14 14\n", + "21 21 21\n", + "26 26 26\n", + "39 39 39\n", + "5 5 5\n", + "4 4 4\n", + "23 23 23\n", + "8 8 8\n", + "33 33 33\n", + "67 67 67\n", + "26 26 26\n", + "5 5 5\n", + "7 7 7\n", + "13 13 13\n", + "10 10 10\n", + "25 25 25\n", + "9 9 9\n", + "88 88 88\n", + "21 21 21\n", + "41 41 41\n", + "4 4 4\n", + "3 3 3\n", + "19 19 19\n", + "6 6 6\n", + "7 7 7\n", + "37 37 37\n", + "63 63 63\n", + "21 21 21\n", + "13 13 13\n", + "6 6 6\n", + "8 8 8\n", + "11 11 11\n", + "183 183 183\n", + "21 21 21\n", + "15 15 15\n", + "21 21 21\n", + "12 12 12\n", + "10 10 10\n", + "59 59 59\n", + "6 6 6\n", + "16 16 16\n", + "9 9 9\n", + "48 48 48\n", + "4 4 4\n", + "7 7 7\n", + "9 9 9\n", + "5 5 5\n", + "74 74 74\n", + "8 8 8\n", + "15 15 15\n", + "14 14 14\n", + "29 29 29\n", + "15 15 15\n", + "16 16 16\n", + "18 18 18\n", + "10 10 10\n", + "15 15 15\n", + "28 28 28\n", + "10 10 10\n", + "5 5 5\n", + "30 30 30\n", + "3 3 3\n", + "44 44 44\n", + "12 12 12\n", + "6 6 6\n", + "6 6 6\n", + "24 24 24\n", + "8 8 8\n", + "7 7 7\n", + "14 14 14\n", + "3 3 3\n", + "18 18 18\n", + "11 11 11\n", + "33 33 33\n", + "16 16 16\n", + "29 29 29\n", + "4 4 4\n", + "4 4 4\n", + "7 7 7\n", + "7 7 7\n", + "5 5 5\n", + "8 8 8\n", + "8 8 8\n", + "23 23 23\n", + "7 7 
7\n", + "27 27 27\n", + "4 4 4\n", + "21 21 21\n", + "4 4 4\n", + "11 11 11\n", + "17 17 17\n", + "39 39 39\n", + "58 58 58\n", + "12 12 12\n", + "11 11 11\n", + "24 24 24\n", + "4 4 4\n", + "6 6 6\n", + "20 20 20\n", + "7 7 7\n", + "25 25 25\n", + "15 15 15\n", + "9 9 9\n", + "18 18 18\n", + "6 6 6\n", + "11 11 11\n", + "7 7 7\n", + "6 6 6\n", + "15 15 15\n", + "3 3 3\n", + "26 26 26\n", + "12 12 12\n", + "12 12 12\n", + "21 21 21\n", + "21 21 21\n", + "20 20 20\n", + "12 12 12\n", + "10 10 10\n", + "21 21 21\n", + "49 49 49\n", + "10 10 10\n", + "9 9 9\n", + "10 10 10\n", + "19 19 19\n", + "12 12 12\n", + "8 8 8\n", + "20 20 20\n", + "7 7 7\n", + "4 4 4\n", + "4 4 4\n", + "19 19 19\n", + "5 5 5\n", + "22 22 22\n", + "33 33 33\n", + "7 7 7\n", + "13 13 13\n", + "23 23 23\n", + "16 16 16\n", + "5 5 5\n", + "3 3 3\n", + "4 4 4\n", + "7 7 7\n", + "11 11 11\n", + "12 12 12\n", + "70 70 70\n", + "21 21 21\n", + "12 12 12\n", + "22 22 22\n", + "12 12 12\n", + "99 99 99\n", + "14 14 14\n", + "7 7 7\n", + "101 101 101\n", + "14 14 14\n", + "3 3 3\n", + "3 3 3\n", + "21 21 21\n", + "23 23 23\n", + "19 19 19\n", + "18 18 18\n", + "5 5 5\n", + "13 13 13\n", + "25 25 25\n", + "8 8 8\n", + "32 32 32\n", + "3 3 3\n", + "8 8 8\n", + "11 11 11\n", + "18 18 18\n", + "6 6 6\n", + "6 6 6\n", + "36 36 36\n", + "4 4 4\n", + "19 19 19\n", + "8 8 8\n", + "11 11 11\n", + "26 26 26\n", + "10 10 10\n", + "22 22 22\n", + "12 12 12\n", + "83 83 83\n", + "16 16 16\n", + "7 7 7\n", + "19 19 19\n", + "4 4 4\n", + "6 6 6\n", + "25 25 25\n", + "7 7 7\n", + "72 72 72\n", + "53 53 53\n", + "38 38 38\n", + "36 36 36\n", + "11 11 11\n", + "11 11 11\n", + "13 13 13\n", + "7 7 7\n", + "37 37 37\n", + "13 13 13\n", + "27 27 27\n", + "5 5 5\n", + "11 11 11\n", + "19 19 19\n", + "3 3 3\n", + "5 5 5\n", + "18 18 18\n", + "14 14 14\n", + "4 4 4\n", + "6 6 6\n", + "13 13 13\n", + "8 8 8\n", + "6 6 6\n", + "120 120 120\n", + "10 10 10\n", + "7 7 7\n", + "14 14 14\n", + "12 12 12\n", + "11 11 11\n", + "10 10 
10\n", + "33 33 33\n", + "5 5 5\n", + "13 13 13\n", + "11 11 11\n", + "9 9 9\n", + "11 11 11\n", + "3 3 3\n", + "4 4 4\n", + "27 27 27\n", + "19 19 19\n", + "10 10 10\n", + "5 5 5\n", + "11 11 11\n", + "34 34 34\n", + "24 24 24\n", + "3 3 3\n", + "15 15 15\n", + "3 3 3\n", + "12 12 12\n", + "63 63 63\n", + "19 19 19\n", + "10 10 10\n", + "68 68 68\n", + "4 4 4\n", + "11 11 11\n", + "12 12 12\n", + "8 8 8\n", + "5 5 5\n", + "3 3 3\n", + "6 6 6\n", + "34 34 34\n", + "7 7 7\n", + "16 16 16\n", + "12 12 12\n", + "19 19 19\n", + "11 11 11\n", + "46 46 46\n", + "6 6 6\n", + "18 18 18\n", + "100 100 100\n", + "9 9 9\n", + "19 19 19\n", + "34 34 34\n", + "51 51 51\n", + "51 51 51\n", + "7 7 7\n", + "7 7 7\n", + "4 4 4\n", + "13 13 13\n", + "25 25 25\n", + "26 26 26\n", + "6 6 6\n", + "11 11 11\n", + "23 23 23\n", + "21 21 21\n", + "8 8 8\n", + "7 7 7\n", + "10 10 10\n", + "8 8 8\n", + "15 15 15\n", + "11 11 11\n", + "12 12 12\n", + "13 13 13\n", + "16 16 16\n", + "9 9 9\n", + "12 12 12\n", + "14 14 14\n", + "42 42 42\n", + "12 12 12\n", + "10 10 10\n", + "9 9 9\n", + "7 7 7\n", + "3 3 3\n", + "132 132 132\n", + "7 7 7\n", + "7 7 7\n", + "9 9 9\n", + "6 6 6\n", + "7 7 7\n", + "5 5 5\n", + "26 26 26\n", + "8 8 8\n", + "12 12 12\n", + "24 24 24\n", + "50 50 50\n", + "8 8 8\n", + "16 16 16\n", + "24 24 24\n", + "14 14 14\n", + "7 7 7\n", + "16 16 16\n", + "9 9 9\n", + "37 37 37\n", + "11 11 11\n", + "42 42 42\n", + "5 5 5\n", + "18 18 18\n", + "20 20 20\n", + "16 16 16\n", + "9 9 9\n", + "23 23 23\n", + "13 13 13\n", + "5 5 5\n", + "4 4 4\n", + "20 20 20\n", + "17 17 17\n", + "12 12 12\n", + "20 20 20\n", + "20 20 20\n", + "7 7 7\n", + "25 25 25\n", + "17 17 17\n", + "8 8 8\n", + "12 12 12\n", + "9 9 9\n", + "8 8 8\n", + "7 7 7\n", + "8 8 8\n", + "16 16 16\n", + "6 6 6\n", + "4 4 4\n", + "6 6 6\n", + "15 15 15\n", + "14 14 14\n", + "46 46 46\n", + "4 4 4\n", + "6 6 6\n", + "28 28 28\n", + "27 27 27\n", + "11 11 11\n", + "9 9 9\n", + "33 33 33\n", + "11 11 11\n", + "10 10 
10\n", + "17 17 17\n", + "5 5 5\n", + "14 14 14\n", + "9 9 9\n", + "21 21 21\n", + "8 8 8\n", + "9 9 9\n", + "14 14 14\n", + "23 23 23\n", + "8 8 8\n", + "24 24 24\n", + "12 12 12\n", + "12 12 12\n", + "6 6 6\n", + "28 28 28\n", + "7 7 7\n", + "16 16 16\n", + "3 3 3\n", + "32 32 32\n", + "3 3 3\n", + "31 31 31\n", + "6 6 6\n", + "10 10 10\n", + "8 8 8\n", + "12 12 12\n", + "5 5 5\n", + "12 12 12\n", + "8 8 8\n", + "32 32 32\n", + "18 18 18\n", + "9 9 9\n", + "6 6 6\n", + "3 3 3\n", + "13 13 13\n", + "3 3 3\n", + "10 10 10\n", + "7 7 7\n", + "14 14 14\n", + "9 9 9\n", + "15 15 15\n", + "5 5 5\n", + "8 8 8\n", + "117 117 117\n", + "15 15 15\n", + "17 17 17\n", + "31 31 31\n", + "16 16 16\n", + "10 10 10\n", + "10 10 10\n", + "27 27 27\n", + "9 9 9\n", + "14 14 14\n", + "45 45 45\n", + "8 8 8\n", + "12 12 12\n", + "10 10 10\n", + "11 11 11\n", + "89 89 89\n", + "100 100 100\n", + "10 10 10\n", + "6 6 6\n", + "11 11 11\n", + "96 96 96\n", + "11 11 11\n", + "12 12 12\n", + "4 4 4\n", + "14 14 14\n", + "24 24 24\n", + "23 23 23\n", + "17 17 17\n", + "11 11 11\n", + "12 12 12\n", + "7 7 7\n", + "9 9 9\n", + "6 6 6\n", + "19 19 19\n", + "26 26 26\n", + "3 3 3\n", + "9 9 9\n", + "29 29 29\n", + "7 7 7\n", + "34 34 34\n", + "5 5 5\n", + "10 10 10\n", + "16 16 16\n", + "22 22 22\n", + "10 10 10\n", + "9 9 9\n", + "4 4 4\n", + "35 35 35\n", + "19 19 19\n", + "3 3 3\n", + "20 20 20\n", + "6 6 6\n", + "7 7 7\n", + "7 7 7\n", + "9 9 9\n", + "18 18 18\n", + "49 49 49\n", + "9 9 9\n", + "33 33 33\n", + "24 24 24\n", + "10 10 10\n", + "13 13 13\n", + "11 11 11\n", + "39 39 39\n", + "15 15 15\n", + "11 11 11\n", + "11 11 11\n", + "20 20 20\n", + "19 19 19\n", + "11 11 11\n", + "7 7 7\n", + "4 4 4\n", + "5 5 5\n", + "3 3 3\n", + "6 6 6\n", + "8 8 8\n", + "3 3 3\n", + "9 9 9\n", + "18 18 18\n", + "6 6 6\n", + "9 9 9\n", + "21 21 21\n", + "13 13 13\n", + "12 12 12\n", + "7 7 7\n", + "9 9 9\n", + "8 8 8\n", + "35 35 35\n", + "33 33 33\n", + "16 16 16\n", + "4 4 4\n", + "9 9 9\n", + "25 
25 25\n", + "12 12 12\n", + "16 16 16\n", + "22 22 22\n", + "9 9 9\n", + "8 8 8\n", + "12 12 12\n", + "15 15 15\n", + "12 12 12\n", + "15 15 15\n", + "60 60 60\n", + "11 11 11\n", + "29 29 29\n", + "22 22 22\n", + "3 3 3\n", + "7 7 7\n", + "6 6 6\n", + "23 23 23\n", + "16 16 16\n", + "20 20 20\n", + "6 6 6\n", + "11 11 11\n", + "25 25 25\n", + "12 12 12\n", + "15 15 15\n", + "4 4 4\n", + "6 6 6\n", + "8 8 8\n", + "24 24 24\n", + "25 25 25\n", + "23 23 23\n", + "6 6 6\n", + "5 5 5\n", + "15 15 15\n", + "18 18 18\n", + "38 38 38\n", + "14 14 14\n", + "16 16 16\n", + "24 24 24\n", + "4 4 4\n", + "19 19 19\n", + "3 3 3\n", + "14 14 14\n", + "17 17 17\n", + "9 9 9\n", + "23 23 23\n", + "15 15 15\n", + "7 7 7\n", + "9 9 9\n", + "29 29 29\n", + "23 23 23\n", + "8 8 8\n", + "21 21 21\n", + "45 45 45\n", + "87 87 87\n", + "17 17 17\n", + "10 10 10\n", + "4 4 4\n", + "13 13 13\n", + "11 11 11\n", + "29 29 29\n", + "6 6 6\n", + "11 11 11\n", + "4 4 4\n", + "4 4 4\n", + "61 61 61\n", + "15 15 15\n", + "4 4 4\n", + "54 54 54\n", + "8 8 8\n", + "15 15 15\n", + "11 11 11\n", + "12 12 12\n", + "8 8 8\n", + "51 51 51\n", + "10 10 10\n", + "12 12 12\n", + "32 32 32\n", + "18 18 18\n", + "6 6 6\n", + "17 17 17\n", + "13 13 13\n", + "3 3 3\n", + "12 12 12\n", + "38 38 38\n", + "4 4 4\n", + "7 7 7\n", + "22 22 22\n", + "8 8 8\n", + "34 34 34\n", + "8 8 8\n", + "5 5 5\n", + "12 12 12\n", + "40 40 40\n", + "22 22 22\n", + "28 28 28\n", + "5 5 5\n", + "16 16 16\n", + "7 7 7\n", + "4 4 4\n", + "7 7 7\n", + "41 41 41\n", + "11 11 11\n", + "23 23 23\n", + "38 38 38\n", + "14 14 14\n", + "6 6 6\n", + "12 12 12\n", + "9 9 9\n", + "11 11 11\n", + "39 39 39\n", + "5 5 5\n", + "4 4 4\n", + "11 11 11\n", + "49 49 49\n", + "24 24 24\n", + "9 9 9\n", + "4 4 4\n", + "29 29 29\n", + "36 36 36\n", + "18 18 18\n", + "6 6 6\n", + "14 14 14\n", + "15 15 15\n", + "11 11 11\n", + "3 3 3\n", + "4 4 4\n", + "4 4 4\n", + "8 8 8\n", + "17 17 17\n", + "4 4 4\n", + "4 4 4\n", + "7 7 7\n", + "8 8 8\n", + "20 20 
20\n", + "4 4 4\n", + "17 17 17\n", + "11 11 11\n", + "18 18 18\n", + "8 8 8\n", + "30 30 30\n", + "4 4 4\n", + "83 83 83\n", + "15 15 15\n", + "13 13 13\n", + "5 5 5\n", + "33 33 33\n", + "6 6 6\n", + "8 8 8\n", + "21 21 21\n", + "38 38 38\n", + "8 8 8\n", + "6 6 6\n", + "7 7 7\n", + "10 10 10\n", + "11 11 11\n", + "38 38 38\n", + "6 6 6\n", + "28 28 28\n", + "3 3 3\n", + "6 6 6\n", + "12 12 12\n", + "14 14 14\n", + "8 8 8\n", + "4 4 4\n", + "17 17 17\n", + "4 4 4\n", + "90 90 90\n", + "11 11 11\n", + "10 10 10\n", + "6 6 6\n", + "3 3 3\n", + "16 16 16\n", + "16 16 16\n", + "7 7 7\n", + "51 51 51\n", + "4 4 4\n", + "6 6 6\n", + "8 8 8\n", + "33 33 33\n", + "62 62 62\n", + "6 6 6\n", + "6 6 6\n", + "14 14 14\n", + "10 10 10\n", + "24 24 24\n", + "9 9 9\n", + "55 55 55\n", + "16 16 16\n", + "39 39 39\n", + "6 6 6\n", + "39 39 39\n", + "20 20 20\n", + "4 4 4\n", + "6 6 6\n", + "55 55 55\n", + "6 6 6\n", + "9 9 9\n", + "7 7 7\n", + "8 8 8\n", + "5 5 5\n", + "4 4 4\n", + "7 7 7\n", + "5 5 5\n", + "47 47 47\n", + "9 9 9\n", + "19 19 19\n", + "9 9 9\n", + "24 24 24\n", + "13 13 13\n", + "11 11 11\n", + "8 8 8\n", + "11 11 11\n", + "19 19 19\n", + "8 8 8\n", + "16 16 16\n", + "15 15 15\n", + "9 9 9\n", + "4 4 4\n", + "31 31 31\n", + "4 4 4\n", + "12 12 12\n", + "8 8 8\n", + "6 6 6\n", + "51 51 51\n", + "11 11 11\n", + "13 13 13\n", + "21 21 21\n", + "9 9 9\n", + "65 65 65\n", + "31 31 31\n", + "6 6 6\n", + "3 3 3\n", + "11 11 11\n", + "24 24 24\n", + "7 7 7\n", + "3 3 3\n", + "5 5 5\n", + "15 15 15\n", + "12 12 12\n", + "12 12 12\n", + "23 23 23\n", + "21 21 21\n", + "21 21 21\n", + "91 91 91\n", + "8 8 8\n", + "26 26 26\n", + "7 7 7\n", + "9 9 9\n", + "41 41 41\n", + "89 89 89\n", + "10 10 10\n", + "24 24 24\n", + "17 17 17\n", + "12 12 12\n", + "14 14 14\n", + "12 12 12\n", + "12 12 12\n", + "36 36 36\n", + "5 5 5\n", + "55 55 55\n", + "16 16 16\n", + "7 7 7\n", + "10 10 10\n", + "18 18 18\n", + "22 22 22\n", + "3 3 3\n", + "8 8 8\n", + "4 4 4\n", + "4 4 4\n", + "22 22 
22\n", + "4 4 4\n", + "6 6 6\n", + "75 75 75\n", + "8 8 8\n", + "9 9 9\n", + "46 46 46\n", + "10 10 10\n", + "16 16 16\n", + "11 11 11\n", + "28 28 28\n", + "4 4 4\n", + "9 9 9\n", + "12 12 12\n", + "9 9 9\n", + "20 20 20\n", + "13 13 13\n", + "4 4 4\n", + "10 10 10\n", + "22 22 22\n", + "14 14 14\n", + "36 36 36\n", + "18 18 18\n", + "10 10 10\n", + "7 7 7\n", + "18 18 18\n", + "39 39 39\n", + "16 16 16\n", + "200 200 200\n", + "9 9 9\n", + "12 12 12\n", + "5 5 5\n", + "33 33 33\n", + "3 3 3\n", + "9 9 9\n", + "8 8 8\n", + "19 19 19\n", + "9 9 9\n", + "10 10 10\n", + "3 3 3\n", + "106 106 106\n", + "15 15 15\n", + "10 10 10\n", + "15 15 15\n", + "18 18 18\n", + "6 6 6\n", + "10 10 10\n", + "15 15 15\n", + "35 35 35\n", + "4 4 4\n", + "31 31 31\n", + "13 13 13\n", + "10 10 10\n", + "60 60 60\n", + "7 7 7\n", + "21 21 21\n", + "13 13 13\n", + "19 19 19\n", + "35 35 35\n", + "13 13 13\n", + "6 6 6\n", + "15 15 15\n", + "15 15 15\n", + "5 5 5\n", + "29 29 29\n", + "4 4 4\n", + "51 51 51\n", + "21 21 21\n", + "10 10 10\n", + "20 20 20\n", + "21 21 21\n", + "10 10 10\n", + "9 9 9\n", + "16 16 16\n", + "14 14 14\n", + "9 9 9\n", + "11 11 11\n", + "11 11 11\n", + "11 11 11\n", + "9 9 9\n", + "93 93 93\n", + "23 23 23\n", + "13 13 13\n", + "23 23 23\n", + "4 4 4\n", + "6 6 6\n", + "17 17 17\n", + "14 14 14\n", + "16 16 16\n", + "5 5 5\n", + "29 29 29\n", + "6 6 6\n", + "4 4 4\n", + "29 29 29\n", + "18 18 18\n", + "26 26 26\n", + "8 8 8\n", + "4 4 4\n", + "25 25 25\n", + "4 4 4\n", + "10 10 10\n", + "13 13 13\n", + "11 11 11\n", + "30 30 30\n", + "20 20 20\n", + "9 9 9\n", + "20 20 20\n", + "31 31 31\n", + "31 31 31\n", + "39 39 39\n", + "10 10 10\n", + "15 15 15\n", + "15 15 15\n", + "9 9 9\n", + "9 9 9\n", + "25 25 25\n", + "28 28 28\n", + "14 14 14\n", + "4 4 4\n", + "16 16 16\n", + "3 3 3\n", + "6 6 6\n", + "8 8 8\n", + "21 21 21\n", + "15 15 15\n", + "18 18 18\n", + "16 16 16\n", + "3 3 3\n", + "4 4 4\n", + "14 14 14\n", + "4 4 4\n", + "14 14 14\n", + "13 13 13\n", + 
"4 4 4\n", + "4 4 4\n", + "27 27 27\n", + "12 12 12\n", + "14 14 14\n", + "10 10 10\n", + "13 13 13\n", + "29 29 29\n", + "6 6 6\n", + "34 34 34\n", + "22 22 22\n", + "3 3 3\n", + "15 15 15\n", + "11 11 11\n", + "11 11 11\n", + "6 6 6\n", + "6 6 6\n", + "7 7 7\n", + "69 69 69\n", + "9 9 9\n", + "28 28 28\n", + "4 4 4\n", + "6 6 6\n", + "12 12 12\n", + "70 70 70\n", + "61 61 61\n", + "13 13 13\n", + "10 10 10\n", + "4 4 4\n", + "7 7 7\n", + "10 10 10\n", + "12 12 12\n", + "27 27 27\n", + "26 26 26\n", + "9 9 9\n", + "8 8 8\n", + "11 11 11\n", + "11 11 11\n", + "7 7 7\n", + "9 9 9\n", + "3 3 3\n", + "19 19 19\n", + "14 14 14\n", + "11 11 11\n", + "6 6 6\n", + "3 3 3\n", + "9 9 9\n", + "46 46 46\n", + "17 17 17\n", + "19 19 19\n", + "18 18 18\n", + "21 21 21\n", + "7 7 7\n", + "9 9 9\n", + "17 17 17\n", + "68 68 68\n", + "8 8 8\n", + "160 160 160\n", + "7 7 7\n", + "20 20 20\n", + "50 50 50\n", + "5 5 5\n", + "23 23 23\n", + "14 14 14\n", + "4 4 4\n", + "43 43 43\n", + "36 36 36\n", + "6 6 6\n", + "5 5 5\n", + "8 8 8\n", + "20 20 20\n", + "6 6 6\n", + "84 84 84\n", + "42 42 42\n", + "6 6 6\n", + "4 4 4\n", + "29 29 29\n", + "8 8 8\n", + "10 10 10\n", + "17 17 17\n", + "9 9 9\n", + "20 20 20\n", + "13 13 13\n", + "27 27 27\n", + "15 15 15\n", + "20 20 20\n", + "36 36 36\n", + "23 23 23\n", + "20 20 20\n", + "33 33 33\n", + "13 13 13\n", + "10 10 10\n", + "20 20 20\n", + "20 20 20\n", + "17 17 17\n", + "23 23 23\n", + "9 9 9\n", + "4 4 4\n", + "31 31 31\n", + "7 7 7\n", + "17 17 17\n", + "32 32 32\n", + "9 9 9\n", + "8 8 8\n", + "6 6 6\n", + "14 14 14\n", + "36 36 36\n", + "3 3 3\n", + "7 7 7\n", + "5 5 5\n", + "8 8 8\n", + "25 25 25\n", + "5 5 5\n", + "19 19 19\n", + "10 10 10\n", + "18 18 18\n", + "13 13 13\n", + "16 16 16\n", + "16 16 16\n", + "4 4 4\n", + "6 6 6\n", + "18 18 18\n", + "11 11 11\n", + "18 18 18\n", + "19 19 19\n", + "40 40 40\n", + "8 8 8\n", + "22 22 22\n", + "49 49 49\n", + "6 6 6\n", + "4 4 4\n", + "9 9 9\n", + "10 10 10\n", + "83 83 83\n", + "17 
17 17\n", + "9 9 9\n", + "18 18 18\n", + "8 8 8\n", + "18 18 18\n", + "12 12 12\n", + "12 12 12\n", + "6 6 6\n", + "19 19 19\n", + "23 23 23\n", + "9 9 9\n", + "6 6 6\n", + "90 90 90\n", + "28 28 28\n", + "27 27 27\n", + "19 19 19\n", + "3 3 3\n", + "8 8 8\n", + "53 53 53\n", + "10 10 10\n", + "10 10 10\n", + "30 30 30\n", + "22 22 22\n", + "18 18 18\n", + "7 7 7\n", + "8 8 8\n", + "18 18 18\n", + "23 23 23\n", + "38 38 38\n", + "4 4 4\n", + "10 10 10\n", + "23 23 23\n", + "14 14 14\n", + "7 7 7\n", + "24 24 24\n", + "20 20 20\n", + "13 13 13\n", + "8 8 8\n", + "14 14 14\n", + "11 11 11\n", + "45 45 45\n", + "53 53 53\n", + "9 9 9\n", + "12 12 12\n", + "15 15 15\n", + "23 23 23\n", + "16 16 16\n", + "13 13 13\n", + "8 8 8\n", + "4 4 4\n", + "39 39 39\n", + "10 10 10\n", + "34 34 34\n", + "22 22 22\n", + "8 8 8\n", + "23 23 23\n", + "21 21 21\n", + "10 10 10\n", + "8 8 8\n", + "8 8 8\n", + "8 8 8\n", + "10 10 10\n", + "17 17 17\n", + "14 14 14\n", + "28 28 28\n", + "118 118 118\n", + "29 29 29\n", + "34 34 34\n", + "12 12 12\n", + "23 23 23\n", + "123 123 123\n", + "13 13 13\n", + "17 17 17\n", + "14 14 14\n", + "21 21 21\n", + "10 10 10\n", + "12 12 12\n", + "32 32 32\n", + "6 6 6\n", + "33 33 33\n", + "14 14 14\n", + "8 8 8\n", + "8 8 8\n", + "14 14 14\n", + "59 59 59\n", + "25 25 25\n", + "16 16 16\n", + "11 11 11\n", + "7 7 7\n", + "11 11 11\n", + "12 12 12\n", + "13 13 13\n", + "8 8 8\n", + "75 75 75\n", + "7 7 7\n", + "28 28 28\n", + "7 7 7\n", + "12 12 12\n", + "11 11 11\n", + "15 15 15\n", + "49 49 49\n", + "13 13 13\n", + "20 20 20\n", + "9 9 9\n", + "23 23 23\n", + "126 126 126\n", + "12 12 12\n", + "9 9 9\n", + "11 11 11\n", + "12 12 12\n", + "8 8 8\n", + "12 12 12\n", + "12 12 12\n", + "89 89 89\n", + "5 5 5\n", + "21 21 21\n", + "23 23 23\n", + "6 6 6\n", + "34 34 34\n", + "75 75 75\n", + "41 41 41\n", + "21 21 21\n", + "19 19 19\n", + "12 12 12\n", + "3 3 3\n", + "10 10 10\n", + "3 3 3\n", + "6 6 6\n", + "11 11 11\n", + "8 8 8\n", + "14 14 14\n", + "23 
23 23\n", + "12 12 12\n", + "8 8 8\n", + "93 93 93\n", + "75 75 75\n", + "12 12 12\n", + "17 17 17\n", + "18 18 18\n", + "20 20 20\n", + "31 31 31\n", + "32 32 32\n", + "10 10 10\n", + "18 18 18\n", + "4 4 4\n", + "4 4 4\n", + "9 9 9\n", + "24 24 24\n", + "13 13 13\n", + "10 10 10\n", + "8 8 8\n", + "16 16 16\n", + "49 49 49\n", + "31 31 31\n", + "5 5 5\n", + "6 6 6\n", + "6 6 6\n", + "7 7 7\n", + "20 20 20\n", + "9 9 9\n", + "9 9 9\n", + "14 14 14\n", + "12 12 12\n", + "16 16 16\n", + "10 10 10\n", + "25 25 25\n", + "29 29 29\n", + "19 19 19\n", + "11 11 11\n", + "4 4 4\n", + "13 13 13\n", + "9 9 9\n", + "4 4 4\n", + "23 23 23\n", + "6 6 6\n", + "5 5 5\n", + "24 24 24\n", + "9 9 9\n", + "79 79 79\n", + "5 5 5\n", + "3 3 3\n", + "10 10 10\n", + "30 30 30\n", + "14 14 14\n", + "9 9 9\n", + "10 10 10\n", + "11 11 11\n", + "16 16 16\n", + "4 4 4\n", + "11 11 11\n", + "8 8 8\n", + "14 14 14\n", + "8 8 8\n", + "35 35 35\n", + "15 15 15\n", + "19 19 19\n", + "4 4 4\n", + "9 9 9\n", + "13 13 13\n", + "15 15 15\n", + "7 7 7\n", + "172 172 172\n", + "9 9 9\n", + "12 12 12\n", + "8 8 8\n", + "16 16 16\n", + "4 4 4\n", + "5 5 5\n", + "34 34 34\n", + "4 4 4\n", + "15 15 15\n", + "28 28 28\n", + "19 19 19\n", + "9 9 9\n", + "36 36 36\n", + "19 19 19\n", + "15 15 15\n", + "32 32 32\n", + "27 27 27\n", + "57 57 57\n", + "10 10 10\n", + "9 9 9\n", + "42 42 42\n", + "8 8 8\n", + "6 6 6\n", + "22 22 22\n", + "11 11 11\n", + "8 8 8\n", + "8 8 8\n", + "8 8 8\n", + "19 19 19\n", + "7 7 7\n", + "18 18 18\n", + "11 11 11\n", + "6 6 6\n", + "12 12 12\n", + "8 8 8\n", + "12 12 12\n", + "15 15 15\n", + "10 10 10\n", + "8 8 8\n", + "6 6 6\n", + "26 26 26\n", + "4 4 4\n", + "17 17 17\n", + "9 9 9\n", + "6 6 6\n", + "13 13 13\n", + "12 12 12\n", + "23 23 23\n", + "11 11 11\n", + "83 83 83\n", + "4 4 4\n", + "10 10 10\n", + "4 4 4\n", + "31 31 31\n", + "19 19 19\n", + "13 13 13\n", + "28 28 28\n", + "11 11 11\n", + "4 4 4\n", + "12 12 12\n", + "19 19 19\n", + "4 4 4\n", + "61 61 61\n", + "9 9 
9\n", + "18 18 18\n", + "13 13 13\n", + "3 3 3\n", + "50 50 50\n", + "23 23 23\n", + "66 66 66\n", + "21 21 21\n", + "30 30 30\n", + "14 14 14\n", + "10 10 10\n", + "5 5 5\n", + "11 11 11\n", + "67 67 67\n", + "46 46 46\n", + "13 13 13\n", + "20 20 20\n", + "3 3 3\n", + "9 9 9\n", + "17 17 17\n", + "12 12 12\n", + "27 27 27\n", + "6 6 6\n", + "9 9 9\n", + "62 62 62\n", + "11 11 11\n", + "8 8 8\n", + "11 11 11\n", + "3 3 3\n", + "41 41 41\n", + "12 12 12\n", + "21 21 21\n", + "7 7 7\n", + "120 120 120\n", + "11 11 11\n", + "17 17 17\n", + "32 32 32\n", + "6 6 6\n", + "22 22 22\n", + "14 14 14\n", + "13 13 13\n", + "12 12 12\n", + "7 7 7\n", + "20 20 20\n", + "5 5 5\n", + "8 8 8\n", + "8 8 8\n", + "9 9 9\n", + "28 28 28\n", + "22 22 22\n", + "8 8 8\n", + "5 5 5\n", + "3 3 3\n", + "25 25 25\n", + "12 12 12\n", + "7 7 7\n", + "10 10 10\n", + "63 63 63\n", + "13 13 13\n", + "5 5 5\n", + "6 6 6\n", + "14 14 14\n", + "44 44 44\n", + "8 8 8\n", + "9 9 9\n", + "9 9 9\n", + "19 19 19\n", + "8 8 8\n", + "6 6 6\n", + "23 23 23\n", + "24 24 24\n", + "24 24 24\n", + "11 11 11\n", + "4 4 4\n", + "9 9 9\n", + "13 13 13\n", + "25 25 25\n", + "4 4 4\n", + "4 4 4\n", + "4 4 4\n", + "34 34 34\n", + "10 10 10\n", + "38 38 38\n", + "10 10 10\n", + "3 3 3\n", + "8 8 8\n", + "13 13 13\n", + "11 11 11\n", + "9 9 9\n", + "11 11 11\n", + "15 15 15\n", + "15 15 15\n", + "20 20 20\n", + "9 9 9\n", + "36 36 36\n", + "6 6 6\n", + "9 9 9\n", + "10 10 10\n", + "23 23 23\n", + "6 6 6\n", + "13 13 13\n", + "9 9 9\n", + "6 6 6\n", + "16 16 16\n", + "4 4 4\n", + "20 20 20\n", + "122 122 122\n", + "24 24 24\n", + "34 34 34\n", + "4 4 4\n", + "3 3 3\n", + "28 28 28\n", + "47 47 47\n", + "9 9 9\n", + "4 4 4\n", + "5 5 5\n", + "16 16 16\n", + "16 16 16\n", + "13 13 13\n", + "48 48 48\n", + "38 38 38\n", + "14 14 14\n", + "27 27 27\n", + "10 10 10\n", + "62 62 62\n", + "11 11 11\n", + "8 8 8\n", + "7 7 7\n", + "19 19 19\n", + "27 27 27\n", + "15 15 15\n", + "33 33 33\n", + "105 105 105\n", + "4 4 4\n", + 
"6 6 6\n", + "70 70 70\n", + "15 15 15\n", + "12 12 12\n", + "5 5 5\n", + "5 5 5\n", + "10 10 10\n", + "15 15 15\n", + "11 11 11\n", + "22 22 22\n", + "28 28 28\n", + "14 14 14\n", + "13 13 13\n", + "8 8 8\n", + "20 20 20\n", + "3 3 3\n", + "4 4 4\n", + "23 23 23\n", + "6 6 6\n", + "4 4 4\n", + "5 5 5\n", + "27 27 27\n", + "6 6 6\n", + "7 7 7\n", + "10 10 10\n", + "6 6 6\n", + "9 9 9\n", + "16 16 16\n", + "10 10 10\n", + "13 13 13\n", + "4 4 4\n", + "9 9 9\n", + "4 4 4\n", + "11 11 11\n", + "7 7 7\n", + "22 22 22\n", + "22 22 22\n", + "25 25 25\n", + "14 14 14\n", + "5 5 5\n", + "4 4 4\n", + "8 8 8\n", + "10 10 10\n", + "6 6 6\n", + "14 14 14\n", + "21 21 21\n", + "21 21 21\n", + "15 15 15\n", + "5 5 5\n", + "52 52 52\n", + "8 8 8\n", + "5 5 5\n", + "16 16 16\n", + "5 5 5\n", + "39 39 39\n", + "14 14 14\n", + "5 5 5\n", + "11 11 11\n", + "11 11 11\n", + "10 10 10\n", + "30 30 30\n", + "8 8 8\n", + "27 27 27\n", + "32 32 32\n", + "36 36 36\n", + "15 15 15\n", + "18 18 18\n", + "14 14 14\n", + "18 18 18\n", + "11 11 11\n", + "5 5 5\n", + "12 12 12\n", + "30 30 30\n", + "3 3 3\n", + "15 15 15\n", + "4 4 4\n", + "6 6 6\n", + "8 8 8\n", + "6 6 6\n", + "5 5 5\n", + "23 23 23\n", + "9 9 9\n", + "6 6 6\n", + "19 19 19\n", + "12 12 12\n", + "3 3 3\n", + "30 30 30\n", + "18 18 18\n", + "25 25 25\n", + "10 10 10\n", + "7 7 7\n", + "8 8 8\n", + "13 13 13\n", + "9 9 9\n", + "31 31 31\n", + "10 10 10\n", + "23 23 23\n", + "3 3 3\n", + "10 10 10\n", + "6 6 6\n", + "30 30 30\n", + "8 8 8\n", + "43 43 43\n", + "8 8 8\n", + "39 39 39\n", + "13 13 13\n", + "6 6 6\n", + "19 19 19\n", + "18 18 18\n", + "8 8 8\n", + "11 11 11\n", + "4 4 4\n", + "15 15 15\n", + "21 21 21\n", + "28 28 28\n", + "11 11 11\n", + "17 17 17\n", + "36 36 36\n", + "8 8 8\n", + "14 14 14\n", + "19 19 19\n", + "15 15 15\n", + "10 10 10\n", + "17 17 17\n", + "3 3 3\n", + "16 16 16\n", + "9 9 9\n", + "21 21 21\n", + "13 13 13\n", + "11 11 11\n", + "19 19 19\n", + "16 16 16\n", + "17 17 17\n", + "11 11 11\n", + "9 9 
9\n", + "7 7 7\n", + "10 10 10\n", + "14 14 14\n", + "13 13 13\n", + "79 79 79\n", + "13 13 13\n", + "16 16 16\n", + "9 9 9\n", + "12 12 12\n", + "32 32 32\n", + "9 9 9\n", + "28 28 28\n", + "7 7 7\n", + "3 3 3\n", + "4 4 4\n", + "17 17 17\n", + "4 4 4\n", + "7 7 7\n", + "16 16 16\n", + "12 12 12\n", + "27 27 27\n", + "4 4 4\n", + "4 4 4\n", + "12 12 12\n", + "8 8 8\n", + "27 27 27\n", + "14 14 14\n", + "10 10 10\n", + "13 13 13\n", + "7 7 7\n", + "4 4 4\n", + "11 11 11\n", + "24 24 24\n", + "3 3 3\n", + "12 12 12\n", + "8 8 8\n", + "11 11 11\n", + "11 11 11\n", + "227 227 227\n", + "23 23 23\n", + "26 26 26\n", + "30 30 30\n", + "4 4 4\n", + "17 17 17\n", + "26 26 26\n", + "34 34 34\n", + "11 11 11\n", + "25 25 25\n", + "14 14 14\n", + "5 5 5\n", + "10 10 10\n", + "9 9 9\n", + "24 24 24\n", + "10 10 10\n", + "7 7 7\n", + "4 4 4\n", + "14 14 14\n", + "9 9 9\n", + "12 12 12\n", + "43 43 43\n", + "12 12 12\n", + "4 4 4\n", + "66 66 66\n", + "10 10 10\n", + "26 26 26\n", + "91 91 91\n", + "4 4 4\n", + "8 8 8\n", + "7 7 7\n", + "8 8 8\n", + "4 4 4\n", + "16 16 16\n", + "51 51 51\n", + "7 7 7\n", + "34 34 34\n", + "8 8 8\n", + "8 8 8\n", + "17 17 17\n", + "11 11 11\n", + "24 24 24\n", + "115 115 115\n", + "14 14 14\n", + "6 6 6\n", + "11 11 11\n", + "17 17 17\n", + "8 8 8\n", + "15 15 15\n", + "14 14 14\n", + "16 16 16\n", + "10 10 10\n", + "6 6 6\n", + "8 8 8\n", + "6 6 6\n", + "6 6 6\n", + "25 25 25\n", + "23 23 23\n", + "4 4 4\n", + "5 5 5\n", + "15 15 15\n", + "55 55 55\n", + "15 15 15\n", + "24 24 24\n", + "46 46 46\n", + "10 10 10\n", + "17 17 17\n", + "11 11 11\n", + "8 8 8\n", + "4 4 4\n", + "36 36 36\n", + "27 27 27\n", + "8 8 8\n", + "24 24 24\n", + "19 19 19\n", + "13 13 13\n", + "35 35 35\n", + "6 6 6\n", + "15 15 15\n", + "9 9 9\n", + "12 12 12\n", + "4 4 4\n", + "11 11 11\n", + "22 22 22\n", + "4 4 4\n", + "32 32 32\n", + "18 18 18\n", + "27 27 27\n", + "21 21 21\n", + "8 8 8\n", + "64 64 64\n", + "10 10 10\n", + "11 11 11\n", + "9 9 9\n", + "9 9 9\n", + 
"17 17 17\n", + "15 15 15\n", + "28 28 28\n", + "39 39 39\n", + "29 29 29\n", + "38 38 38\n", + "9 9 9\n", + "10 10 10\n", + "9 9 9\n", + "6 6 6\n", + "4 4 4\n", + "4 4 4\n", + "36 36 36\n", + "8 8 8\n", + "3 3 3\n", + "27 27 27\n", + "15 15 15\n", + "5 5 5\n", + "13 13 13\n", + "4 4 4\n", + "10 10 10\n", + "9 9 9\n", + "11 11 11\n", + "24 24 24\n", + "17 17 17\n", + "8 8 8\n", + "4 4 4\n", + "3 3 3\n", + "3 3 3\n", + "15 15 15\n", + "9 9 9\n", + "6 6 6\n", + "19 19 19\n", + "8 8 8\n", + "81 81 81\n", + "7 7 7\n", + "4 4 4\n", + "22 22 22\n", + "3 3 3\n", + "68 68 68\n", + "3 3 3\n", + "11 11 11\n", + "5 5 5\n", + "11 11 11\n", + "18 18 18\n", + "7 7 7\n", + "70 70 70\n", + "9 9 9\n", + "26 26 26\n", + "36 36 36\n", + "17 17 17\n", + "26 26 26\n", + "4 4 4\n", + "7 7 7\n", + "19 19 19\n", + "16 16 16\n", + "7 7 7\n", + "9 9 9\n", + "25 25 25\n", + "6 6 6\n", + "6 6 6\n", + "4 4 4\n", + "10 10 10\n", + "4 4 4\n", + "13 13 13\n", + "31 31 31\n", + "14 14 14\n", + "12 12 12\n", + "7 7 7\n", + "46 46 46\n", + "46 46 46\n", + "18 18 18\n", + "20 20 20\n", + "16 16 16\n", + "19 19 19\n", + "8 8 8\n", + "10 10 10\n", + "22 22 22\n", + "7 7 7\n", + "8 8 8\n", + "4 4 4\n", + "16 16 16\n", + "11 11 11\n", + "11 11 11\n", + "17 17 17\n", + "27 27 27\n", + "6 6 6\n", + "18 18 18\n", + "15 15 15\n", + "4 4 4\n", + "24 24 24\n", + "14 14 14\n", + "23 23 23\n", + "16 16 16\n", + "12 12 12\n", + "5 5 5\n", + "5 5 5\n", + "1037 512 512\n", + "21 21 21\n", + "13 13 13\n", + "3 3 3\n", + "12 12 12\n", + "17 17 17\n", + "43 43 43\n", + "8 8 8\n", + "41 41 41\n", + "40 40 40\n", + "21 21 21\n", + "3 3 3\n", + "28 28 28\n", + "14 14 14\n", + "10 10 10\n", + "32 32 32\n", + "11 11 11\n", + "11 11 11\n", + "28 28 28\n", + "23 23 23\n", + "14 14 14\n", + "6 6 6\n", + "18 18 18\n", + "12 12 12\n", + "15 15 15\n", + "30 30 30\n", + "17 17 17\n", + "7 7 7\n", + "10 10 10\n", + "28 28 28\n" + ] + } + ], + "execution_count": 19 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": 
"2025-06-27T13:58:37.609529Z", + "start_time": "2025-06-27T13:58:37.576455Z" + } + }, + "cell_type": "code", + "source": [ + "print(f\"F1 on ES data before postprocessing {fine_grained_flausch_by_label(test_gold_spans, test_baseline_spans)['TOTAL']['STRICT']}\")\n", + "print(f\"F1 on ES data with GE2017 postprocessing {fine_grained_flausch_by_label(test_gold_spans, ge2017_rules_test_pred_spans)['TOTAL']['STRICT']}\")" + ], + "id": "c2e99dae31d7e359", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "F1 on ES data before postprocessing {'prec': 0.7364043506078055, 'rec': 0.7587343441001978, 'f1': 0.7474025974025974}\n", + "F1 on ES data with GE2017 postprocessing {'prec': 0.7312859884836852, 'rec': 0.7534607778510217, 'f1': 0.7422077922077922}\n" + ] + } + ], + "execution_count": 20 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T14:09:19.270705Z", + "start_time": "2025-06-27T14:03:29.168435Z" + } + }, + "cell_type": "code", + "source": [ + "test_comments = test_data\n", + "\n", + "test_comments['predicted_labels'] = None\n", + "test_comments['predicted_probs'] = None\n", + "test_comments['offset_mapping'] = None\n", + "test_comments['text_tokens'] = None\n", + "\n", + "for idx in range(len(test_comments)): #range(15):\n", + " row = test_comments.iloc[idx]\n", + " text = row['comment']\n", + " key = (row['document'], row['comment_id'])\n", + "\n", + " text_tokens = classifier.tokenizer.tokenize(text)\n", + " test_comments.at[idx, 'text_tokens'] = text_tokens\n", + "\n", + " device = next(classifier.model.parameters()).device\n", + " inputs = classifier.tokenizer(text, return_tensors=\"pt\", truncation=True, max_length=512, return_offsets_mapping=True)\n", + "\n", + " offset_mapping = inputs.pop('offset_mapping')\n", + " test_comments.at[idx, 'offset_mapping'] = offset_mapping.cpu().numpy()[0].tolist()\n", + " inputs = {k: v.to(device) for k, v in inputs.items()}\n", + "\n", + " # Vorhersage\n", + " with 
torch.no_grad():\n", + " outputs = classifier.model(**inputs)\n", + "\n", + " predicted_labels = torch.argmax(outputs.logits, dim=2)[0].cpu().numpy()\n", + " predicted_probs = torch.nn.functional.softmax(outputs.logits, dim=2)[0].cpu().numpy()\n", + " test_comments.at[idx, 'predicted_labels'] = predicted_labels\n", + " test_comments.at[idx, 'predicted_probs'] = predicted_probs" + ], + "id": "40a52a3ed629de0e", + "outputs": [], + "execution_count": 21 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T14:09:24.886675Z", + "start_time": "2025-06-27T14:09:24.806396Z" + } + }, + "cell_type": "code", + "source": "test_comments", + "id": "94f29be61115fa59", + "outputs": [ + { + "data": { + "text/plain": [ + " document comment_id comment \\\n", + "0 NDY-004 1 Lol i love lochis \n", + "1 NDY-004 2 ihr singt voll gut :) \n", + "2 NDY-004 3 Junge fick dich \n", + "3 NDY-004 4 Ihr seit die besten \n", + "4 NDY-004 5 ihr seit die ALLER besten ich finde euch soooo... \n", + "... ... ... ... \n", + "9224 NDY-203 522 hihi kannst du mich grüßen 💕 👋 😍 Achso wusstes... \n", + "9225 NDY-203 523 #Glocke aktiviert 👑 Ich liebe deine Videos 💍 💎... \n", + "9226 NDY-203 524 Bist die beste ❤ Bitte Grüße mich 💕 ❤ 😘 😍 \n", + "9227 NDY-203 525 Hi Bonny ❤️ War letztens auf'm Flughafen , und... \n", + "9228 NDY-203 526 du bist die beste ich bin neu ich hab dich sof... \n", + "\n", + " predicted_labels \\\n", + "0 [0, 0, 0, 0, 0, 0, 0, 0] \n", + "1 [0, 2, 12, 12, 12, 12, 12, 0] \n", + "2 [0, 0, 0, 0, 0, 0] \n", + "3 [0, 3, 13, 13, 13, 0] \n", + "4 [0, 3, 13, 13, 13, 13, 13, 3, 13, 13, 13, 13, ... \n", + "... ... \n", + "9224 [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 11, 0, 11, 11, ... \n", + "9225 [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 11, 11, 11, 11,... \n", + "9226 [0, 3, 13, 13, 13, 13, 0, 0, 0, 1, 1, 11, 11, ... \n", + "9227 [0, 0, 0, 0, 1, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0,... \n", + "9228 [0, 3, 13, 13, 13, 0, 0, 0, 1, 11, 11, 11, 11,... 
\n", + "\n", + " predicted_probs \\\n", + "0 [[0.99999654, 1.7456429e-07, 1.6115715e-07, 1.... \n", + "1 [[0.9999976, 1.1218729e-07, 1.239344e-07, 1.50... \n", + "2 [[0.9999981, 5.8623616e-08, 1.05891374e-07, 1.... \n", + "3 [[0.99999774, 1.6417343e-07, 1.384722e-07, 1.1... \n", + "4 [[0.99999785, 1.2960982e-07, 1.4320104e-07, 1.... \n", + "... ... \n", + "9224 [[0.99999774, 1.8107521e-07, 1.0220851e-07, 9.... \n", + "9225 [[0.9999976, 1.1908668e-07, 8.492378e-08, 6.60... \n", + "9226 [[0.9999974, 2.1362885e-07, 1.2580301e-07, 9.5... \n", + "9227 [[0.99999523, 6.63842e-07, 2.0147786e-07, 1.16... \n", + "9228 [[0.999997, 3.4811254e-07, 7.750037e-08, 7.272... \n", + "\n", + " offset_mapping \\\n", + "0 [[0, 0], [0, 1], [1, 3], [4, 5], [6, 10], [11,... \n", + "1 [[0, 0], [0, 3], [4, 8], [8, 9], [10, 14], [15... \n", + "2 [[0, 0], [0, 4], [4, 5], [6, 10], [11, 15], [0... \n", + "3 [[0, 0], [0, 3], [4, 8], [9, 12], [13, 19], [0... \n", + "4 [[0, 0], [0, 3], [4, 8], [9, 12], [13, 17], [1... \n", + "... ... \n", + "9224 [[0, 0], [0, 4], [5, 11], [12, 14], [15, 19], ... \n", + "9225 [[0, 0], [0, 1], [1, 2], [2, 6], [6, 7], [8, 1... \n", + "9226 [[0, 0], [0, 3], [3, 4], [5, 8], [9, 14], [15,... \n", + "9227 [[0, 0], [0, 2], [3, 6], [6, 8], [9, 10], [10,... \n", + "9228 [[0, 0], [0, 2], [3, 7], [8, 11], [12, 17], [1... \n", + "\n", + " text_tokens \n", + "0 [▁L, ol, ▁i, ▁love, ▁loc, his] \n", + "1 [▁ihr, ▁sing, t, ▁voll, ▁gut, ▁:)] \n", + "2 [▁Jung, e, ▁fick, ▁dich] \n", + "3 [▁Ihr, ▁seit, ▁die, ▁besten] \n", + "4 [▁ihr, ▁seit, ▁die, ▁ALLE, R, ▁besten, ▁ich, ▁... \n", + "... ... \n", + "9224 [▁hihi, ▁kannst, ▁du, ▁mich, ▁gr, üß, en, ▁, 💕... \n", + "9225 [▁#, G, lock, e, ▁aktiv, iert, ▁, 👑, ▁Ich, ▁li... \n", + "9226 [▁Bis, t, ▁die, ▁beste, ▁❤, ▁Bitte, ▁Grüße, ▁m... \n", + "9227 [▁Hi, ▁Bon, ny, ▁❤, ️, ▁War, ▁letzten, s, ▁auf... \n", + "9228 [▁du, ▁bist, ▁die, ▁beste, ▁ich, ▁bin, ▁neu, ▁... \n", + "\n", + "[9229 rows x 7 columns]" + ], + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
documentcomment_idcommentpredicted_labelspredicted_probsoffset_mappingtext_tokens
0NDY-0041Lol i love lochis[0, 0, 0, 0, 0, 0, 0, 0][[0.99999654, 1.7456429e-07, 1.6115715e-07, 1....[[0, 0], [0, 1], [1, 3], [4, 5], [6, 10], [11,...[▁L, ol, ▁i, ▁love, ▁loc, his]
1NDY-0042ihr singt voll gut :)[0, 2, 12, 12, 12, 12, 12, 0][[0.9999976, 1.1218729e-07, 1.239344e-07, 1.50...[[0, 0], [0, 3], [4, 8], [8, 9], [10, 14], [15...[▁ihr, ▁sing, t, ▁voll, ▁gut, ▁:)]
2NDY-0043Junge fick dich[0, 0, 0, 0, 0, 0][[0.9999981, 5.8623616e-08, 1.05891374e-07, 1....[[0, 0], [0, 4], [4, 5], [6, 10], [11, 15], [0...[▁Jung, e, ▁fick, ▁dich]
3NDY-0044Ihr seit die besten[0, 3, 13, 13, 13, 0][[0.99999774, 1.6417343e-07, 1.384722e-07, 1.1...[[0, 0], [0, 3], [4, 8], [9, 12], [13, 19], [0...[▁Ihr, ▁seit, ▁die, ▁besten]
4NDY-0045ihr seit die ALLER besten ich finde euch soooo...[0, 3, 13, 13, 13, 13, 13, 3, 13, 13, 13, 13, ...[[0.99999785, 1.2960982e-07, 1.4320104e-07, 1....[[0, 0], [0, 3], [4, 8], [9, 12], [13, 17], [1...[▁ihr, ▁seit, ▁die, ▁ALLE, R, ▁besten, ▁ich, ▁...
........................
9224NDY-203522hihi kannst du mich grüßen 💕 👋 😍 Achso wusstes...[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 11, 0, 11, 11, ...[[0.99999774, 1.8107521e-07, 1.0220851e-07, 9....[[0, 0], [0, 4], [5, 11], [12, 14], [15, 19], ...[▁hihi, ▁kannst, ▁du, ▁mich, ▁gr, üß, en, ▁, 💕...
9225NDY-203523#Glocke aktiviert 👑 Ich liebe deine Videos 💍 💎...[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 11, 11, 11, 11,...[[0.9999976, 1.1908668e-07, 8.492378e-08, 6.60...[[0, 0], [0, 1], [1, 2], [2, 6], [6, 7], [8, 1...[▁#, G, lock, e, ▁aktiv, iert, ▁, 👑, ▁Ich, ▁li...
9226NDY-203524Bist die beste ❤ Bitte Grüße mich 💕 ❤ 😘 😍[0, 3, 13, 13, 13, 13, 0, 0, 0, 1, 1, 11, 11, ...[[0.9999974, 2.1362885e-07, 1.2580301e-07, 9.5...[[0, 0], [0, 3], [3, 4], [5, 8], [9, 14], [15,...[▁Bis, t, ▁die, ▁beste, ▁❤, ▁Bitte, ▁Grüße, ▁m...
9227NDY-203525Hi Bonny ❤️ War letztens auf'm Flughafen , und...[0, 0, 0, 0, 1, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0,...[[0.99999523, 6.63842e-07, 2.0147786e-07, 1.16...[[0, 0], [0, 2], [3, 6], [6, 8], [9, 10], [10,...[▁Hi, ▁Bon, ny, ▁❤, ️, ▁War, ▁letzten, s, ▁auf...
9228NDY-203526du bist die beste ich bin neu ich hab dich sof...[0, 3, 13, 13, 13, 0, 0, 0, 1, 11, 11, 11, 11,...[[0.999997, 3.4811254e-07, 7.750037e-08, 7.272...[[0, 0], [0, 2], [3, 7], [8, 11], [12, 17], [1...[▁du, ▁bist, ▁die, ▁beste, ▁ich, ▁bin, ▁neu, ▁...
\n", + "

9229 rows × 7 columns

\n", + "
" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 22 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T14:14:45.361749Z", + "start_time": "2025-06-27T14:14:45.128024Z" + } + }, + "cell_type": "code", + "source": [ + "test_comments['predicted_spans'] = test_comments.apply(pred_to_spans, axis=1, result_type='expand')\n", + "test_comments_spans = pd.DataFrame(test_comments.apply(convert_spans, axis=1).explode().dropna().tolist())" + ], + "id": "7dd47953b687e698", + "outputs": [], + "execution_count": 38 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T14:15:32.520978Z", + "start_time": "2025-06-27T14:15:32.516550Z" + } + }, + "cell_type": "code", + "source": "test_comments_spans", + "id": "d3cfc4c08b921b97", + "outputs": [ + { + "data": { + "text/plain": [ + " document comment_id type start end\n", + "0 NDY-004 2 compliment 0 21\n", + "1 NDY-004 4 affection declaration 0 19\n", + "2 NDY-004 5 affection declaration 0 25\n", + "3 NDY-004 5 affection declaration 26 56\n", + "4 NDY-004 5 positive feedback 57 71\n", + "... ... ... ... ... ...\n", + "5498 NDY-203 526 affection declaration 0 17\n", + "5499 NDY-203 526 positive feedback 30 59\n", + "5500 NDY-203 526 positive feedback 64 104\n", + "5501 NDY-203 526 affection declaration 105 106\n", + "5502 NDY-203 526 affection declaration 105 114\n", + "\n", + "[5503 rows x 5 columns]" + ], + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
documentcomment_idtypestartend
0NDY-0042compliment021
1NDY-0044affection declaration019
2NDY-0045affection declaration025
3NDY-0045affection declaration2656
4NDY-0045positive feedback5771
..................
5498NDY-203526affection declaration017
5499NDY-203526positive feedback3059
5500NDY-203526positive feedback64104
5501NDY-203526affection declaration105106
5502NDY-203526affection declaration105114
\n", + "

5503 rows × 5 columns

\n", + "
" + ] + }, + "execution_count": 40, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 40 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T14:39:35.231099Z", + "start_time": "2025-06-27T14:39:35.222087Z" + } + }, + "cell_type": "code", + "source": "test_comments_spans.to_csv(\"./submissions/task2-predicted.csv\", index=False)", + "id": "cbcf3c8b0fbcee46", + "outputs": [], + "execution_count": 48 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T14:39:36.841319Z", + "start_time": "2025-06-27T14:39:36.522665Z" + } + }, + "cell_type": "code", + "source": "!head -n 10 ./submissions/task2-predicted.csv", + "id": "ebcd1770eeecf063", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "document,comment_id,type,start,end\r\n", + "NDY-004,2,compliment,0,21\r\n", + "NDY-004,4,affection declaration,0,19\r\n", + "NDY-004,5,affection declaration,0,25\r\n", + "NDY-004,5,affection declaration,26,56\r\n", + "NDY-004,5,positive feedback,57,71\r\n", + "NDY-004,5,affection declaration,72,87\r\n", + "NDY-004,6,affection declaration,0,17\r\n", + "NDY-004,8,implicit,0,46\r\n", + "NDY-004,16,compliment,0,29\r\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + } + ], + "execution_count": 49 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T14:15:56.000961Z", + "start_time": "2025-06-27T14:15:55.676535Z" + } + }, + "cell_type": "code", + "source": "!cp './submissions/task2-predicted.csv' './submissions/subtask2_submission2.csv'", + "id": "f1d154948530d203", + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + } + ], + "execution_count": 43 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T14:16:33.921016Z", + "start_time": "2025-06-27T14:16:33.599Z" + } + }, + "cell_type": "code", + "source": "!head -n 10 './submissions/subtask2_submission1.csv'", + "id": "dcac8dbbe2d62001", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "document,comment_id,type,start,end\r\n", + "NDY-004,1,affection declaration,0,17\r\n", + "NDY-004,2,compliment,0,21\r\n", + "NDY-004,4,affection declaration,0,19\r\n", + "NDY-004,5,affection declaration,0,25\r\n", + "NDY-004,5,affection declaration,26,56\r\n", + "NDY-004,5,positive feedback,57,71\r\n", + "NDY-004,5,affection declaration,72,87\r\n", + "NDY-004,6,affection declaration,0,17\r\n", + "NDY-004,8,implicit,0,46\r\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + } + ], + "execution_count": 46 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T22:07:26.334867Z", + "start_time": "2025-06-27T22:07:26.325629Z" + } + }, + "cell_type": "code", + "source": "test_comments_spans = pd.read_csv(\"./submissions/task2-predicted.csv\")", + "id": "4ae3d9e4c556a288", + "outputs": [], + "execution_count": 56 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T22:07:30.302897Z", + "start_time": "2025-06-27T22:07:30.290021Z" + } + }, + "cell_type": "code", + "source": "test_comments_spans", + "id": "156c9b1c48a954b4", + "outputs": [ + { + "data": { + "text/plain": [ + " document comment_id type start end\n", + "0 NDY-004 2 compliment 0 21\n", + "1 NDY-004 4 affection declaration 0 19\n", + "2 NDY-004 5 affection declaration 0 25\n", + "3 NDY-004 5 affection declaration 26 56\n", + "4 NDY-004 5 positive feedback 57 71\n", + "... ... ... ... ... ...\n", + "5498 NDY-203 526 affection declaration 0 17\n", + "5499 NDY-203 526 positive feedback 30 59\n", + "5500 NDY-203 526 positive feedback 64 104\n", + "5501 NDY-203 526 affection declaration 105 106\n", + "5502 NDY-203 526 affection declaration 105 114\n", + "\n", + "[5503 rows x 5 columns]" + ], + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
documentcomment_idtypestartend
0NDY-0042compliment021
1NDY-0044affection declaration019
2NDY-0045affection declaration025
3NDY-0045affection declaration2656
4NDY-0045positive feedback5771
..................
5498NDY-203526affection declaration017
5499NDY-203526positive feedback3059
5500NDY-203526positive feedback64104
5501NDY-203526affection declaration105106
5502NDY-203526affection declaration105114
\n", + "

5503 rows × 5 columns

\n", + "
" + ] + }, + "execution_count": 57, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 57 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T22:07:50.819958Z", + "start_time": "2025-06-27T22:07:50.699928Z" + } + }, + "cell_type": "code", + "source": "test_comments", + "id": "2b63b3b12b9648f6", + "outputs": [ + { + "data": { + "text/plain": [ + " document comment_id comment \\\n", + "0 NDY-004 1 Lol i love lochis \n", + "1 NDY-004 2 ihr singt voll gut :) \n", + "2 NDY-004 3 Junge fick dich \n", + "3 NDY-004 4 Ihr seit die besten \n", + "4 NDY-004 5 ihr seit die ALLER besten ich finde euch soooo... \n", + "... ... ... ... \n", + "9224 NDY-203 522 hihi kannst du mich grüßen 💕 👋 😍 Achso wusstes... \n", + "9225 NDY-203 523 #Glocke aktiviert 👑 Ich liebe deine Videos 💍 💎... \n", + "9226 NDY-203 524 Bist die beste ❤ Bitte Grüße mich 💕 ❤ 😘 😍 \n", + "9227 NDY-203 525 Hi Bonny ❤️ War letztens auf'm Flughafen , und... \n", + "9228 NDY-203 526 du bist die beste ich bin neu ich hab dich sof... \n", + "\n", + " predicted_labels \\\n", + "0 [0, 0, 0, 0, 0, 0, 0, 0] \n", + "1 [0, 2, 12, 12, 12, 12, 12, 0] \n", + "2 [0, 0, 0, 0, 0, 0] \n", + "3 [0, 3, 13, 13, 13, 0] \n", + "4 [0, 3, 13, 13, 13, 13, 13, 3, 13, 13, 13, 13, ... \n", + "... ... \n", + "9224 [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 11, 0, 11, 11, ... \n", + "9225 [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 11, 11, 11, 11,... \n", + "9226 [0, 3, 13, 13, 13, 13, 0, 0, 0, 1, 1, 11, 11, ... \n", + "9227 [0, 0, 0, 0, 1, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0,... \n", + "9228 [0, 3, 13, 13, 13, 0, 0, 0, 1, 11, 11, 11, 11,... \n", + "\n", + " predicted_probs \\\n", + "0 [[0.99999654, 1.7456429e-07, 1.6115715e-07, 1.... \n", + "1 [[0.9999976, 1.1218729e-07, 1.239344e-07, 1.50... \n", + "2 [[0.9999981, 5.8623616e-08, 1.05891374e-07, 1.... \n", + "3 [[0.99999774, 1.6417343e-07, 1.384722e-07, 1.1... \n", + "4 [[0.99999785, 1.2960982e-07, 1.4320104e-07, 1.... \n", + "... ... 
\n", + "9224 [[0.99999774, 1.8107521e-07, 1.0220851e-07, 9.... \n", + "9225 [[0.9999976, 1.1908668e-07, 8.492378e-08, 6.60... \n", + "9226 [[0.9999974, 2.1362885e-07, 1.2580301e-07, 9.5... \n", + "9227 [[0.99999523, 6.63842e-07, 2.0147786e-07, 1.16... \n", + "9228 [[0.999997, 3.4811254e-07, 7.750037e-08, 7.272... \n", + "\n", + " offset_mapping \\\n", + "0 [[0, 0], [0, 1], [1, 3], [4, 5], [6, 10], [11,... \n", + "1 [[0, 0], [0, 3], [4, 8], [8, 9], [10, 14], [15... \n", + "2 [[0, 0], [0, 4], [4, 5], [6, 10], [11, 15], [0... \n", + "3 [[0, 0], [0, 3], [4, 8], [9, 12], [13, 19], [0... \n", + "4 [[0, 0], [0, 3], [4, 8], [9, 12], [13, 17], [1... \n", + "... ... \n", + "9224 [[0, 0], [0, 4], [5, 11], [12, 14], [15, 19], ... \n", + "9225 [[0, 0], [0, 1], [1, 2], [2, 6], [6, 7], [8, 1... \n", + "9226 [[0, 0], [0, 3], [3, 4], [5, 8], [9, 14], [15,... \n", + "9227 [[0, 0], [0, 2], [3, 6], [6, 8], [9, 10], [10,... \n", + "9228 [[0, 0], [0, 2], [3, 7], [8, 11], [12, 17], [1... \n", + "\n", + " text_tokens \\\n", + "0 [▁L, ol, ▁i, ▁love, ▁loc, his] \n", + "1 [▁ihr, ▁sing, t, ▁voll, ▁gut, ▁:)] \n", + "2 [▁Jung, e, ▁fick, ▁dich] \n", + "3 [▁Ihr, ▁seit, ▁die, ▁besten] \n", + "4 [▁ihr, ▁seit, ▁die, ▁ALLE, R, ▁besten, ▁ich, ▁... \n", + "... ... \n", + "9224 [▁hihi, ▁kannst, ▁du, ▁mich, ▁gr, üß, en, ▁, 💕... \n", + "9225 [▁#, G, lock, e, ▁aktiv, iert, ▁, 👑, ▁Ich, ▁li... \n", + "9226 [▁Bis, t, ▁die, ▁beste, ▁❤, ▁Bitte, ▁Grüße, ▁m... \n", + "9227 [▁Hi, ▁Bon, ny, ▁❤, ️, ▁War, ▁letzten, s, ▁auf... \n", + "9228 [▁du, ▁bist, ▁die, ▁beste, ▁ich, ▁bin, ▁neu, ▁... \n", + "\n", + " predicted_spans \n", + "0 [] \n", + "1 [{'type': 'compliment', 'start': 0, 'end': 21,... \n", + "2 [] \n", + "3 [{'type': 'affection declaration', 'start': 0,... \n", + "4 [{'type': 'affection declaration', 'start': 0,... \n", + "... ... \n", + "9224 [{'type': 'positive feedback', 'start': 27, 'e... \n", + "9225 [{'type': 'positive feedback', 'start': 20, 'e... 
\n", + "9226 [{'type': 'affection declaration', 'start': 0,... \n", + "9227 [{'type': 'positive feedback', 'start': 9, 'en... \n", + "9228 [{'type': 'affection declaration', 'start': 0,... \n", + "\n", + "[9229 rows x 8 columns]" + ], + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
documentcomment_idcommentpredicted_labelspredicted_probsoffset_mappingtext_tokenspredicted_spans
0NDY-0041Lol i love lochis[0, 0, 0, 0, 0, 0, 0, 0][[0.99999654, 1.7456429e-07, 1.6115715e-07, 1....[[0, 0], [0, 1], [1, 3], [4, 5], [6, 10], [11,...[▁L, ol, ▁i, ▁love, ▁loc, his][]
1NDY-0042ihr singt voll gut :)[0, 2, 12, 12, 12, 12, 12, 0][[0.9999976, 1.1218729e-07, 1.239344e-07, 1.50...[[0, 0], [0, 3], [4, 8], [8, 9], [10, 14], [15...[▁ihr, ▁sing, t, ▁voll, ▁gut, ▁:)][{'type': 'compliment', 'start': 0, 'end': 21,...
2NDY-0043Junge fick dich[0, 0, 0, 0, 0, 0][[0.9999981, 5.8623616e-08, 1.05891374e-07, 1....[[0, 0], [0, 4], [4, 5], [6, 10], [11, 15], [0...[▁Jung, e, ▁fick, ▁dich][]
3NDY-0044Ihr seit die besten[0, 3, 13, 13, 13, 0][[0.99999774, 1.6417343e-07, 1.384722e-07, 1.1...[[0, 0], [0, 3], [4, 8], [9, 12], [13, 19], [0...[▁Ihr, ▁seit, ▁die, ▁besten][{'type': 'affection declaration', 'start': 0,...
4NDY-0045ihr seit die ALLER besten ich finde euch soooo...[0, 3, 13, 13, 13, 13, 13, 3, 13, 13, 13, 13, ...[[0.99999785, 1.2960982e-07, 1.4320104e-07, 1....[[0, 0], [0, 3], [4, 8], [9, 12], [13, 17], [1...[▁ihr, ▁seit, ▁die, ▁ALLE, R, ▁besten, ▁ich, ▁...[{'type': 'affection declaration', 'start': 0,...
...........................
9224NDY-203522hihi kannst du mich grüßen 💕 👋 😍 Achso wusstes...[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 11, 0, 11, 11, ...[[0.99999774, 1.8107521e-07, 1.0220851e-07, 9....[[0, 0], [0, 4], [5, 11], [12, 14], [15, 19], ...[▁hihi, ▁kannst, ▁du, ▁mich, ▁gr, üß, en, ▁, 💕...[{'type': 'positive feedback', 'start': 27, 'e...
9225NDY-203523#Glocke aktiviert 👑 Ich liebe deine Videos 💍 💎...[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 11, 11, 11, 11,...[[0.9999976, 1.1908668e-07, 8.492378e-08, 6.60...[[0, 0], [0, 1], [1, 2], [2, 6], [6, 7], [8, 1...[▁#, G, lock, e, ▁aktiv, iert, ▁, 👑, ▁Ich, ▁li...[{'type': 'positive feedback', 'start': 20, 'e...
9226NDY-203524Bist die beste ❤ Bitte Grüße mich 💕 ❤ 😘 😍[0, 3, 13, 13, 13, 13, 0, 0, 0, 1, 1, 11, 11, ...[[0.9999974, 2.1362885e-07, 1.2580301e-07, 9.5...[[0, 0], [0, 3], [3, 4], [5, 8], [9, 14], [15,...[▁Bis, t, ▁die, ▁beste, ▁❤, ▁Bitte, ▁Grüße, ▁m...[{'type': 'affection declaration', 'start': 0,...
9227NDY-203525Hi Bonny ❤️ War letztens auf'm Flughafen , und...[0, 0, 0, 0, 1, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0,...[[0.99999523, 6.63842e-07, 2.0147786e-07, 1.16...[[0, 0], [0, 2], [3, 6], [6, 8], [9, 10], [10,...[▁Hi, ▁Bon, ny, ▁❤, ️, ▁War, ▁letzten, s, ▁auf...[{'type': 'positive feedback', 'start': 9, 'en...
9228NDY-203526du bist die beste ich bin neu ich hab dich sof...[0, 3, 13, 13, 13, 0, 0, 0, 1, 11, 11, 11, 11,...[[0.999997, 3.4811254e-07, 7.750037e-08, 7.272...[[0, 0], [0, 2], [3, 7], [8, 11], [12, 17], [1...[▁du, ▁bist, ▁die, ▁beste, ▁ich, ▁bin, ▁neu, ▁...[{'type': 'affection declaration', 'start': 0,...
\n", + "

9229 rows × 8 columns

\n", + "
" + ] + }, + "execution_count": 58, + "metadata": {}, + "output_type": "execute_result" + } + ], + "execution_count": 58 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T22:09:58.052637Z", + "start_time": "2025-06-27T22:09:57.997729Z" + } + }, + "cell_type": "code", + "source": "test_comments['has_spans'] = test_comments.apply(lambda x: len(x['predicted_spans']) > 0, axis=1)", + "id": "263a51fec4f4672", + "outputs": [], + "execution_count": 60 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T22:10:35.264094Z", + "start_time": "2025-06-27T22:10:35.260301Z" + } + }, + "cell_type": "code", + "source": "test_comments['flausch'] = test_comments['has_spans'].map({True: 'yes', False: 'no'})", + "id": "5fa67bbeb303ca3a", + "outputs": [], + "execution_count": 63 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T22:11:57.164479Z", + "start_time": "2025-06-27T22:11:57.150708Z" + } + }, + "cell_type": "code", + "source": "test_comments[[\"document\",\"comment_id\",\"flausch\"]].to_csv(f'./submissions/task1-predicted.csv', index=False)", + "id": "fd7679e665286b70", + "outputs": [], + "execution_count": 66 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T22:12:25.303426Z", + "start_time": "2025-06-27T22:12:24.850361Z" + } + }, + "cell_type": "code", + "source": "!cp './submissions/task1-predicted.csv' './submissions/subtask1_submission2.csv'", + "id": "bd9d8b153b8d27ed", + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + } + ], + "execution_count": 68 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-06-27T22:12:43.388207Z", + "start_time": "2025-06-27T22:12:42.945847Z" + } + }, + "cell_type": "code", + "source": "!head -n 10 './submissions/task1-predicted.csv'", + "id": "5a2738b19dcd4292", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "document,comment_id,flausch\r\n", + "NDY-004,1,no\r\n", + "NDY-004,2,yes\r\n", + "NDY-004,3,no\r\n", + "NDY-004,4,yes\r\n", + "NDY-004,5,yes\r\n", + "NDY-004,6,yes\r\n", + "NDY-004,7,no\r\n", + "NDY-004,8,yes\r\n", + "NDY-004,9,no\r\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + } + ], + "execution_count": 70 + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}