diff --git a/.ipynb_checkpoints/README-checkpoint.md b/.ipynb_checkpoints/README-checkpoint.md
deleted file mode 100644
index 2cd9cc0150f3bff606062cff485ec89d5da9e607..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/README-checkpoint.md
+++ /dev/null
@@ -1,75 +0,0 @@
-# DiscReT: Discourse Relation tagging
-The MELODI team submission for Task 3. 
-
-## Contents
-* **mappings**: a folder with the label conversions we implemented, and specifications of which test results are produced by which of our models.
-* **pytorch_classifier.py**: the bare classifier using mBERT-base-cased and built on PyTorch
-* **make_adapter.py**: code to create a classifier adapter, based on [AdapterHub](https://github.com/adapter-hub/adapter-transformers)
-* **adapter_classifier.py**: classifier using one of the trained adapters (training the adapter beforehand is required)
-* **requirements.txt**: list of dependencies
-* **train_classifiers.sh**: shell script to train all classifiers
-* **configure.py**: list of training arguments
-* **utils.py**: various functions
-
-## Installation
-* Pull data from the [DISRPT Shared Task repository](https://github.com/disrpt/sharedtask2023): 
-	```
-	git clone https://github.com/disrpt/sharedtask2023
-	```
-
-* Install requirements, either:
-	```
-	pip install -r requirements.txt
-	```
-	or by making a conda environment:
-	``` 
-	conda env create -f environment.yml
-	conda activate discret 
-	```
-
-## Running classifiers
-
-The results are created by three different models:
-* the **bare classifier**: an mBERT-base-cased model (trained for up to 6 epochs)
-* the **classifier with A1 adapter**: an mBERT-base-cased model trained for 3 epochs with an adapter that was itself trained with mBERT-base-cased for 15 epochs with layer 1 frozen
-* the **classifier with A1-3 adapter**: an mBERT-base-cased model trained for 4 epochs with an adapter that was itself trained with mBERT-base-cased for 15 epochs with layers 1-3 frozen
-
-Run either the **train_classifiers.sh** script or each script individually (adapters must be trained beforehand):
-
-### Bare classifier
-``` 
-python pytorch_classifier.py \
-		--num_epochs 6 \
-		--data_path [PATH_TO_DATA]
-```
-### Adapter training
-
-A1:
-``` 
-python make_adapter.py \
-		--num_epochs 15 \
-		--freeze_layers 'layer.1' \
-		--data_path [PATH_TO_DATA]
-```
-A1-3:
-``` 
-python make_adapter.py \
-		--num_epochs 15 \
-		--freeze_layers 'layer.1;layer.2;layer.3' \
-		--data_path [PATH_TO_DATA]
-```
-### Classifiers with adapter
-with A1:
-```
-python adapter_classifier.py \
-		--num_epochs 3 \
-		--data_path [PATH_TO_DATA] \
-		--adapter_name 'adapter_15-epochs_frozen-1'
-```
-with A1-3:
-```
-python adapter_classifier.py \
-		--num_epochs 4 \
-		--data_path [PATH_TO_DATA] \
-		--adapter_name 'adapter_15-epochs_frozen-1-2-3'
-```
diff --git a/.ipynb_checkpoints/adapter_classifier-checkpoint.py b/.ipynb_checkpoints/adapter_classifier-checkpoint.py
deleted file mode 100644
index 28eb3b131afe8850da00edc504dfce4d22b00e5b..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/adapter_classifier-checkpoint.py
+++ /dev/null
@@ -1,214 +0,0 @@
-#!/usr/bin/env python
-# coding: utf-8
-
-import torch
-import numpy as np
-from transformers import AutoModel, AutoTokenizer, get_linear_schedule_with_warmup, AutoAdapterModel, AutoModelWithHeads, AutoConfig, TrainingArguments, Trainer, EvalPrediction, set_seed
-from torch import nn
-from torch.optim import AdamW
-from torch.utils.data import DataLoader
-import torch.nn.functional as F
-from torch.autograd import Variable
-from tqdm import tqdm
-import os
-from time import sleep
-from datetime import datetime
-import sys
-from sklearn.metrics import classification_report, accuracy_score
-from utils import open_file
-import pandas as pd
-import datasets
-from configure import parse_args
-from utils import *
-
-args = parse_args()
-now = datetime.now()
-dt_string = now.strftime("%d.%m.%y-%H:%M:%S")
-adapter_name = args.adapter_name
-substitutions_file = 'mappings/substitutions.txt'
-tokenizer = AutoTokenizer.from_pretrained(args.transformer_model)
-
-# we are saving the test results of specific epochs
-# specific_results = open_specific_results('mappings/specific_results.txt')
-# if '1-2-3' in adapter_name or 'layer1;layer2;layer3' in adapter_name:
-#     specific_results = list(specific_results['A1_3'][args.num_epochs])
-# else:
-#     specific_results = list(specific_results['A1'][args.num_epochs])
-
-set_seed(42)
-
-print('Train classifier with adapter\n')
-print('Adapter name:', adapter_name)
-print('Model:', args.transformer_model)
-print('Batch size:', args.batch_size * args.gradient_accumulation_steps)
-print('Num epochs:', args.num_epochs)
-
-# Open mappings
-mappings, inv_mappings = open_mappings(args.mappings_file)
-
-# Open sentences
-train_sentences, dev_dict_sentences, test_dict_sentences = open_sentences(args.data_path, mappings)
-
-# make pandas dataframes
-file_header = ['text', 'labels']
-
-train_df = pd.DataFrame([[' '.join(x[-2]), x[-1]] for x in train_sentences], 
-                        columns =file_header)
-train_df = train_df.sample(frac = 1) # shuffle the train
-
-dev_dict_df = {corpus : pd.DataFrame([[' '.join(x[-2]), x[-1]] 
-                                      for x in sents], 
-                                     columns = file_header)
-               for corpus, sents in dev_dict_sentences.items()}
-
-test_dict_df = {corpus : pd.DataFrame([[' '.join(x[-2]), x[-1]] 
-                                      for x in sents], 
-                                     columns = file_header)
-               for corpus, sents in test_dict_sentences.items()}
-
-#Make datasets from dataframes
-train_dataset = datasets.Dataset.from_pandas(train_df)
-dev_dict_dataset  = {corpus:datasets.Dataset.from_pandas(dev_df) 
-                     for corpus, dev_df in dev_dict_df.items()}
-test_dict_dataset = {corpus: datasets.Dataset.from_pandas(test_df) 
-                     for corpus, test_df in test_dict_df.items()}
-
-# get number of labels
-num_labels = train_df['labels'].nunique() + 1
-
-# Encode the data
-train_dataset = train_dataset.map(encode_batch, batched=True)
-train_dataset.set_format(type="torch", columns=["input_ids", "attention_mask", "labels"])
-
-encoded_dev_dataset = {}
-for corpus in dev_dict_dataset:
-    temp = dev_dict_dataset[corpus].map(encode_batch, batched=True)
-    temp.set_format(type="torch", columns=["input_ids", "attention_mask", "labels"])
-    encoded_dev_dataset[corpus] = temp
-
-encoded_test_dataset = {}
-for corpus in test_dict_dataset:
-    temp = test_dict_dataset[corpus].map(encode_batch, batched=True)
-    temp.set_format(type="torch", columns=["input_ids", "attention_mask", "labels"])
-    encoded_test_dataset[corpus] = temp
-
-# ===============================
-# Training params
-# ===============================
-
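-# Load the base model, then attach the locally saved adapter directory
-# (trained beforehand with make_adapter.py) and activate it.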
-model = AutoAdapterModel.from_pretrained(args.transformer_model)
-active_adapter = model.load_adapter(adapter_name,
-                                  config = adapter_name + "/adapter_config.json")
-model.set_active_adapters(active_adapter)
-
-
-training_args = TrainingArguments(
-    learning_rate    = 2e-5, #1e-4,
-    num_train_epochs = args.num_epochs,
-    per_device_train_batch_size = args.batch_size,
-    per_device_eval_batch_size  = args.batch_size,
-    gradient_accumulation_steps = args.gradient_accumulation_steps,
-    logging_steps  = len(train_sentences) // (args.batch_size * args.gradient_accumulation_steps),  # log roughly once per epoch
-    output_dir = "./training_output",
-    overwrite_output_dir =True,
-    remove_unused_columns=False,
-)
-
-
-trainer = Trainer(
-    model = model,
-    args  = training_args,
-    train_dataset = train_dataset
-)
-
-# Freeze layers in the classifier if desired
-if args.freeze_layers != '':
-    layers_to_freeze = args.freeze_layers.split(';')
-    for name, param in model.named_parameters():
-        if any(x in name for x in layers_to_freeze):
-            param.requires_grad = False
-
-
-# ===============================
-# Start the training 🚀
-# ===============================
-
-print('Start training...')
-trainer.train()
-
-# Dev results
-
-print('\nDev results:')
-for corpus in encoded_dev_dataset:
-    print()
-    dev_results = get_predictions_huggingface(trainer, corpus, 
-                                    encoded_dev_dataset[corpus])
-    
-    
-    path_results = 'results/dev/' + adapter_name + '_' + str(args.num_epochs)
-    if not os.path.exists(path_results):
-        os.makedirs(path_results)
-                
-    print_results_to_file(corpus, 
-                          dev_dict_sentences[corpus], 
-                          dev_results,
-                          inv_mappings, 
-                          substitutions_file, 
-                          path_results)
-
-# Test results
-
-print('\nTest results:')
-for corpus in encoded_test_dataset:
-    print()
-    test_results = get_predictions_huggingface(trainer, 
-                                               corpus, 
-                                               encoded_test_dataset[corpus])
-    
-    
-    path_results = 'results/test/' + adapter_name + '_' + str(args.num_epochs)
-    if not os.path.exists(path_results):
-        os.makedirs(path_results)
-                
-    print_results_to_file(corpus, 
-                          test_dict_sentences[corpus], 
-                          test_results,
-                          inv_mappings, 
-                          substitutions_file, 
-                          path_results)
-
-
-
-#         for corpus in test_dict_dataloader:
-#             test_results = get_predictions(model, 
-#                                 corpus, 
-#                                 test_dict_dataloader[corpus])
-            
-#             path_results = 'results/test/pytorch' + str(epoch_num+1)
-#             if not os.path.exists(path_results):
-#                 os.makedirs(path_results)
-                
-#             print_results_to_file(corpus, 
-#                                 test_dict_sentences[corpus], 
-#                                 test_results,
-#                                 inv_mappings, substitutions_file, 
-#                                 path_results)    
-    
-    
-    
-    
-    
-    
-
-# Save specific test results
-
-# print('\nTest results:')
-# for corpus in encoded_test_dataset:
-#     print()
-#     test_results = get_predictions_huggingface(trainer, corpus, 
-#                                     encoded_test_dataset[corpus])
-# 
-#     print_results_to_file(corpus, test_dict_sentences[corpus], test_results, 
-#                           inv_mappings, substitutions_file)
\ No newline at end of file
diff --git a/.ipynb_checkpoints/check_labels-checkpoint.ipynb b/.ipynb_checkpoints/check_labels-checkpoint.ipynb
deleted file mode 100644
index 1305fdb3572d3dcd57d1af2cbf904babfd1338df..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/check_labels-checkpoint.ipynb
+++ /dev/null
@@ -1,288 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import os"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "num_labels = 134 + 1\n",
-    "\n",
-    "temp = {}\n",
-    "mappings = {}\n",
-    "subs = {}\n",
-    "\n",
-    "\n",
-    "with open('mappings/mappings_substitutions.tsv', 'r') as f:\n",
-    "    counter = -1\n",
-    "    for line in f:\n",
-    "        counter += 1\n",
-    "        if counter < num_labels:\n",
-    "            mappings[line.split(\"\\t\")[0]] = int(line.strip().split(\"\\t\")[1])\n",
-    "        else:\n",
-    "            temp[line.split(\"\\t\")[0]] = int(line.strip().split(\"\\t\")[1])\n",
-    "        \n",
-    "inv_mappings = {v:k for k, v in mappings.items()}\n",
-    "subs = {k:inv_mappings[v] for k, v in temp.items()}"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def read_corpus(file):\n",
-    "    labels = []\n",
-    "\n",
-    "    with open(file, 'r') as f:\n",
-    "        next(f)\n",
-    "        for line in f:\n",
-    "            labels.append(line.strip().split('\\t')[-1])\n",
-    "    return labels"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "path = '/users/melodi/emetheni/clean_data'\n",
-    "list_corpora = [x for x in os.listdir(path)]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "\n",
-      "==spa.rst.sctb==\n",
-      "['antithesis', 'attribution', 'background', 'cause', 'circumstance', 'concession', 'condition', 'conjunction', 'contrast', 'disjunction', 'elaboration', 'evaluation', 'evidence', 'interpretation', 'justify', 'list', 'means', 'motivation', 'preparation', 'purpose', 'restatement', 'result', 'sequence', 'summary']\n",
-      "\n",
-      "\n",
-      "==tha.pdtb.tdtb==\n",
-      "['Comparison.Concession', 'Comparison.Contrast', 'Comparison.Similarity', 'Contingency.Cause', 'Contingency.Cause+Belief', 'Contingency.Cause+SpeechAct', 'Contingency.Condition', 'Contingency.Negative-Condition', 'Contingency.Negative-Condition+SpeechAct', 'Contingency.Purpose', 'Expansion.Conjunction', 'Expansion.Disjunction', 'Expansion.Equivalence', 'Expansion.Exception', 'Expansion.GenExpansion', 'Expansion.Instantiation', 'Expansion.Level-of-detail', 'Expansion.Substitution', 'Temporal.Asynchronous', 'Temporal.Synchronous']\n",
-      "\n",
-      "\n",
-      "==rus.rst.rrt==\n",
-      "['antithesis', 'attribution', 'background', 'cause', 'cause-effect', 'comparison', 'concession', 'conclusion', 'condition', 'contrast', 'effect', 'elaboration', 'evaluation', 'evidence', 'interpretation-evaluation', 'joint', 'motivation', 'preparation', 'purpose', 'restatement', 'sequence', 'solutionhood']\n",
-      "\n",
-      "\n",
-      "==zho.rst.gcdt==\n",
-      "['adversative-antithesis', 'adversative-concession', 'adversative-contrast', 'attribution-negative', 'attribution-positive', 'causal-cause', 'causal-result', 'context-background', 'context-circumstance', 'contingency-condition', 'elaboration-additional', 'elaboration-attribute', 'evaluation-comment', 'explanation-evidence', 'explanation-justify', 'explanation-motivation', 'joint-disjunction', 'joint-list', 'joint-other', 'joint-sequence', 'mode-manner', 'mode-means', 'organization-heading', 'organization-phatic', 'organization-preparation', 'purpose-attribute', 'purpose-goal', 'restatement-partial', 'restatement-repetition', 'topic-question', 'topic-solutionhood']\n",
-      "\n",
-      "\n",
-      "==fra.sdrt.annodis==\n",
-      "['alternation', 'attribution', 'background', 'comment', 'conditional', 'continuation', 'contrast', 'e-elaboration', 'elaboration', 'explanation', 'explanation*', 'flashback', 'frame', 'goal', 'narration', 'parallel', 'result', 'temploc']\n",
-      "\n",
-      "\n",
-      "==por.rst.cstn==\n",
-      "['antithesis', 'attribution', 'background', 'circumstance', 'comparison', 'concession', 'conclusion', 'condition', 'contrast', 'elaboration', 'enablement', 'evaluation', 'evidence', 'explanation', 'interpretation', 'joint', 'justify', 'list', 'means', 'motivation', 'nonvolitional-cause', 'nonvolitional-cause-e', 'nonvolitional-result', 'nonvolitional-result-e', 'otherwise', 'parenthetical', 'purpose', 'restatement', 'sequence', 'solutionhood', 'volitional-cause', 'volitional-result']\n",
-      "\n",
-      "\n",
-      "==eng.sdrt.stac==\n",
-      "['Acknowledgement', 'Alternation', 'Background', 'Clarification_question', 'Comment', 'Conditional', 'Continuation', 'Contrast', 'Correction', 'Elaboration', 'Explanation', 'Narration', 'Parallel', 'Q_Elab', 'Question_answer_pair', 'Result']\n",
-      "\n",
-      "\n",
-      "==eus.rst.ert==\n",
-      "['antithesis', 'background', 'cause', 'circumstance', 'concession', 'condition', 'conjunction', 'contrast', 'disjunction', 'elaboration', 'enablement', 'evaluation', 'evidence', 'interpretation', 'joint', 'justify', 'list', 'means', 'motivation', 'otherwise', 'preparation', 'purpose', 'restatement', 'result', 'sequence', 'solutionhood', 'summary', 'unconditional', 'unless']\n",
-      "\n",
-      "\n",
-      "==zho.dep.scidtb==\n",
-      "['ROOT', 'attribution', 'bg-compare', 'bg-general', 'bg-goal', 'cause', 'comparison', 'condition', 'contrast', 'elab-addition', 'elab-aspect', 'elab-enumember', 'elab-process_step', 'enablement', 'evaluation', 'exp-evidence', 'exp-reason', 'joint', 'manner-means', 'progression', 'result', 'summary', 'temporal']\n",
-      "\n",
-      "\n",
-      "==eng.pdtb.pdtb==\n",
-      "['Comparison.Concession', 'Comparison.Concession+SpeechAct', 'Comparison.Contrast', 'Comparison.Similarity', 'Contingency.Cause', 'Contingency.Cause+Belief', 'Contingency.Cause+SpeechAct', 'Contingency.Condition', 'Contingency.Condition+SpeechAct', 'Contingency.Negative-cause', 'Contingency.Negative-condition', 'Contingency.Purpose', 'Expansion.Conjunction', 'Expansion.Disjunction', 'Expansion.Equivalence', 'Expansion.Exception', 'Expansion.Instantiation', 'Expansion.Level-of-detail', 'Expansion.Manner', 'Expansion.Substitution', 'Hypophora', 'Temporal.Asynchronous', 'Temporal.Synchronous']\n",
-      "topic eng.pdtb.pdtb \n",
-      "\n",
-      "\n",
-      "==deu.rst.pcc==\n",
-      "['antithesis', 'background', 'cause', 'circumstance', 'concession', 'condition', 'conjunction', 'contrast', 'disjunction', 'e-elaboration', 'elaboration', 'evaluation-n', 'evaluation-s', 'evidence', 'interpretation', 'joint', 'list', 'means', 'preparation', 'purpose', 'reason', 'restatement', 'result', 'sequence', 'solutionhood', 'summary']\n",
-      "\n",
-      "\n",
-      "==eng.rst.rstdt==\n",
-      "['attribution', 'background', 'cause', 'comparison', 'condition', 'contrast', 'elaboration', 'enablement', 'evaluation', 'explanation', 'joint', 'manner-means', 'summary', 'temporal', 'textual-organization', 'topic-change', 'topic-comment']\n",
-      "acknowledgement eng.rst.rstdt \n",
-      "\n",
-      "\n",
-      "==zho.rst.sctb==\n",
-      "['antithesis', 'attribution', 'background', 'cause', 'circumstance', 'concession', 'condition', 'conjunction', 'contrast', 'disjunction', 'elaboration', 'enablement', 'evaluation', 'evidence', 'interpretation', 'justify', 'list', 'means', 'motivation', 'preparation', 'purpose', 'restatement', 'result', 'sequence', 'solutionhood', 'summary']\n",
-      "\n",
-      "\n",
-      "==nld.rst.nldt==\n",
-      "['antithesis', 'background', 'circumstance', 'concession', 'condition', 'conjunction', 'contrast', 'disjunction', 'elaboration', 'enablement', 'evaluation', 'evidence', 'interpretation', 'joint', 'justify', 'list', 'means', 'motivation', 'nonvolitional-cause', 'nonvolitional-result', 'otherwise', 'preparation', 'purpose', 'restatement', 'restatement-mn', 'sequence', 'solutionhood', 'summary', 'unconditional', 'unless', 'volitional-cause', 'volitional-result']\n",
-      "\n",
-      "\n",
-      "==tur.pdtb.tdb==\n",
-      "['Comparison.Concession', 'Comparison.Concession+SpeechAct', 'Comparison.Contrast', 'Comparison.Degree', 'Comparison.Similarity', 'Contingency.Cause', 'Contingency.Cause+Belief', 'Contingency.Cause+SpeechAct', 'Contingency.Condition', 'Contingency.Negative-condition', 'Contingency.Purpose', 'Expansion.Conjunction', 'Expansion.Correction', 'Expansion.Disjunction', 'Expansion.Equivalence', 'Expansion.Exception', 'Expansion.Instantiation', 'Expansion.Level-of-detail', 'Expansion.Manner', 'Expansion.Substitution', 'Hypophora', 'Temporal.Asynchronous', 'Temporal.Synchronous']\n",
-      "\n",
-      "\n",
-      "==spa.rst.rststb==\n",
-      "['alternative', 'antithesis', 'background', 'cause', 'circumstance', 'concession', 'condition', 'conjunction', 'contrast', 'disjunction', 'elaboration', 'enablement', 'evaluation', 'evidence', 'interpretation', 'joint', 'justify', 'list', 'means', 'motivation', 'preparation', 'purpose', 'restatement', 'result', 'sequence', 'solutionhood', 'summary', 'unless']\n",
-      "\n",
-      "\n",
-      "==por.pdtb.tedm==\n",
-      "['Comparison.Concession', 'Comparison.Contrast', 'Comparison.Similarity', 'Contingency.Cause', 'Contingency.Cause+Belief', 'Contingency.Condition', 'Contingency.Condition+SpeechAct', 'Contingency.Purpose', 'Expansion.Conjunction', 'Expansion.Disjunction', 'Expansion.Equivalence', 'Expansion.Instantiation', 'Expansion.Level-of-detail', 'Expansion.Manner', 'Expansion.Substitution', 'Hypophora', 'Temporal.Asynchronous', 'Temporal.Synchronous']\n",
-      "\n",
-      "\n",
-      "==ita.pdtb.luna==\n",
-      "['', 'Comparison', 'Comparison.Concession', 'Comparison.Contrast', 'Contingency.Cause', 'Contingency.Condition', 'Contingency.Goal', 'Expansion.Alternative', 'Expansion.Conjunction', 'Expansion.Instantiation', 'Expansion.Restatement', 'Interrupted', 'Repetition', 'Temporal.Asynchronous', 'Temporal.Synchrony']\n",
-      "parallel ita.pdtb.luna \n",
-      "\n",
-      "\n",
-      "==fas.rst.prstc==\n",
-      "['attribution', 'background', 'cause', 'comparison', 'condition', 'contrast', 'elaboration', 'enablement', 'evaluation', 'explanation', 'joint', 'manner-means', 'summary', 'temporal', 'topic-change', 'topic-comment', 'topic-drift']\n",
-      "\n",
-      "\n",
-      "==por.pdtb.crpc==\n",
-      "['Comparison', 'Comparison.Concession', 'Comparison.Contrast', 'Comparison.Similarity', 'Contingency.Cause', 'Contingency.Condition', 'Contingency.Negative', 'Contingency.Purpose', 'Expansion.Conjunction', 'Expansion.Disjunction', 'Expansion.Equivalence', 'Expansion.Exception', 'Expansion.Instantiation', 'Expansion.Level', 'Expansion.Manner', 'Expansion.Substitution', 'Hypophora', 'QAP', 'QAP.Hypophora', 'Temporal', 'Temporal.Asynchronous', 'Temporal.Synchronous']\n",
-      "\n",
-      "\n",
-      "==zho.pdtb.cdtb==\n",
-      "['Alternative', 'Causation', 'Conditional', 'Conjunction', 'Contrast', 'Expansion', 'Progression', 'Purpose', 'Temporal']\n",
-      "preparation zho.pdtb.cdtb \n",
-      "\n",
-      "\n",
-      "==eng.pdtb.tedm==\n",
-      "['Comparison.Concession', 'Comparison.Contrast', 'Comparison.Similarity', 'Contingency.Cause', 'Contingency.Cause+Belief', 'Contingency.Cause+SpeechAct', 'Contingency.Condition', 'Contingency.Purpose', 'Expansion.Conjunction', 'Expansion.Disjunction', 'Expansion.Equivalence', 'Expansion.Instantiation', 'Expansion.Level-of-detail', 'Expansion.Manner', 'Expansion.Substitution', 'Hypophora', 'Temporal.Asynchronous', 'Temporal.Synchronous']\n",
-      "parallel eng.pdtb.tedm \n",
-      "\n",
-      "\n",
-      "==eng.rst.gum==\n",
-      "['adversative', 'attribution', 'causal', 'context', 'contingency', 'elaboration', 'evaluation', 'explanation', 'joint', 'mode', 'organization', 'purpose', 'restatement', 'topic']\n",
-      "attribution-positive eng.rst.gum \n",
-      "\n",
-      "\n",
-      "==eng.dep.covdtb==\n",
-      "['ATTRIBUTION', 'BACKGROUND', 'CAUSE-RESULT', 'COMPARISON', 'CONDITION', 'ELABORATION', 'ENABLEMENT', 'FINDINGS', 'JOINT', 'MANNER-MEANS', 'TEMPORAL', 'TEXTUAL-ORGANIZATION']\n",
-      "interpretation eng.dep.covdtb \n",
-      "\n",
-      "\n",
-      "==tur.pdtb.tedm==\n",
-      "['Comparison.Concession', 'Comparison.Concession+SpeechAct', 'Comparison.Contrast', 'Comparison.Similarity', 'Contingency.Cause', 'Contingency.Cause+Belief', 'Contingency.Cause+SpeechAct', 'Contingency.Condition', 'Contingency.Negative-condition', 'Contingency.Purpose', 'Expansion', 'Expansion.Conjunction', 'Expansion.Disjunction', 'Expansion.Equivalence', 'Expansion.Exception', 'Expansion.Instantiation', 'Expansion.Level-of-detail', 'Expansion.Manner', 'Expansion.Substitution', 'Hypophora', 'Temporal.Asynchronous', 'Temporal.Synchronous']\n",
-      "\n",
-      "\n",
-      "==eng.dep.scidtb==\n",
-      "['attribution', 'bg-compare', 'bg-general', 'bg-goal', 'cause', 'comparison', 'condition', 'contrast', 'elab-addition', 'elab-aspect', 'elab-definition', 'elab-enumember', 'elab-example', 'elab-process_step', 'enablement', 'evaluation', 'exp-evidence', 'exp-reason', 'joint', 'manner-means', 'progression', 'result', 'summary', 'temporal']\n"
-     ]
-    }
-   ],
-   "source": [
-    "for corpus in list_corpora:\n",
-    "    \n",
-    "    labels = read_corpus(path + '/' + corpus + '/' + corpus + '_dev.rels')\n",
-    "    labels = read_corpus(path + '/' + corpus + '/' + corpus + '_test.rels')\n",
-    "    try:\n",
-    "        labels = read_corpus(path + '/' + corpus + '/' + corpus + '_train.rels')\n",
-    "    except:\n",
-    "        pass\n",
-    "\n",
-    "    labels = set(labels)\n",
-    "    print('\\n')\n",
-    "    print(\"==\" + corpus + \"==\")\n",
-    "    print(sorted(labels))\n",
-    "\n",
-    "#     for label in labels:\n",
-    "#         if label not in mappings:\n",
-    "#             if label.lower() in mappings:\n",
-    "#                 print(label + '\\t' + corpus + '\\t' + label.lower())\n",
-    "#             elif label in subs:\n",
-    "#                 print(label + '\\t' + corpus + '\\t' + subs[label])\n",
-    "#             elif label.lower() in subs:\n",
-    "#                 print(label + '\\t' + corpus + '\\t' + subs[label.lower()])\n",
-    "#             else:\n",
-    "#                 print('AAAAAAAAAAAAAAAAAAAA', label, corpus)\n",
-    "\n",
-    "\n",
-    "    test_labels = read_corpus('results/test/A_15-epochs_frozen-1_3/' + corpus +'.tsv')\n",
-    "    test_labels = set(test_labels)\n",
-    "    \n",
-    "    for l in test_labels:\n",
-    "        if l not in labels:\n",
-    "            temp = ''\n",
-    "            \n",
-    "            if l.lower() in labels:\n",
-    "                temp = l.lower()\n",
-    "            elif l.lower() in inv_mappings:\n",
-    "                temp = inv_mappings[l.lower()]\n",
-    "            elif l in subs:\n",
-    "                temp = subs[l]\n",
-    "            elif l.lower() in subs:\n",
-    "                temp = subs[l.lower()]\n",
-    "                \n",
-    "            print(l + ' ' + corpus + ' ' + temp)\n",
-    "            "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.6.7"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 4
-}
diff --git a/.ipynb_checkpoints/configure-checkpoint.py b/.ipynb_checkpoints/configure-checkpoint.py
deleted file mode 100644
index 89e8c0fb51a766c90915459f82e5335db16486ec..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/configure-checkpoint.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import argparse
-import sys
-
-def parse_args():
-    """
-    Parse input arguments.
-    """
-    parser = argparse.ArgumentParser()
-      
-    parser.add_argument("--data_path", default="../sharedtask2023/data", type=str, 
-                        help="The path to the shared task data file from Github.")
-    
-    parser.add_argument("--mappings_file", default="mappings/mappings_substitutions.tsv", type=str, 
-                        help="The mappings file for all relations.")
-
-    # transformer model
-    parser.add_argument("--transformer_model", default="bert-base-multilingual-cased", type=str, 
-                        help="Model used, default: bert-base-multilingual-cased")
-
-    # Number of training epochs
-    parser.add_argument("--num_epochs", default=4, type=int, 
-                        help="Number of training epochs. Default: 4")
-    
-    # Number of gradient accumulation steps
-    parser.add_argument("--gradient_accumulation_steps", default=16, type=int, 
-                        help="Number of gradient accumulation steps. Default: 16")
-    
-    # Dropout
-    parser.add_argument("--dropout", default=0.1, type=float, 
-                        help="Dropout.")
-    
-    # Batch size
-    parser.add_argument("--batch_size", default=8, type=int, 
-                        help="With CUDA: max. 8, without: max. 16. Default: 8")
-    
-    # Use CUDA
-    parser.add_argument("--use_cuda", default='yes', type=str, 
-                        help="Use CUDA [yes/no]. Careful of batch size!")   
-    
-    # freeze layers
-    parser.add_argument("--freeze_layers", default='', type=str, 
-                        help="List of layer(s) to freeze, a str separated by ;. Example: 'layer.1;layer.2'")   
-    
-    # load adapter
-    parser.add_argument("--adapter_name", default='', type=str, 
-                        help="Name of the trained adapter (directory) to load; leave empty for no adapter.")   
-    
-    # normalize direction
-    parser.add_argument("--normalize_direction", default='yes', type=str, 
-                        help="Change order of sentences when the direction of relations is 1<2 to 2>1.") 
-            
-    args = parser.parse_args()
-
-    return args
\ No newline at end of file
diff --git a/.ipynb_checkpoints/connectives-checkpoint.json b/.ipynb_checkpoints/connectives-checkpoint.json
deleted file mode 100644
index 84ee83c9cb4885da6d5d90f9271cfa6359b71de5..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/connectives-checkpoint.json
+++ /dev/null
@@ -1 +0,0 @@
-{"addition": ["in addition", "furthermore", "also", "additionally", "again", "regarding", "besides", "then", "similarly", "moreover", "equally important", "too", "further", "of equal importance", "another", "correspondingly", "indeed", "and"], "time": ["at last", "ultimately", "meanwhile", "subsequently", "last", "thereafter", "presently", "afterward", "when", "a minute later", "in the meantime", "then", "after a short time", "soon", "on the following day", "lastly", "finally", "later", "at length", "next", "now", "the next week"], "sequence": ["hence", "followed by", "also", "after that", "another", "afterwards", "subsequently", "to begin with", "last of all", "eventually", "as soon as", "additionally", "to go on to", "gradually", "when", "later (on)", "third", "finally moreover", "in the end", "then", "before", "first", "second", "finally", "after", "from here on", "next"], "example": ["in the same way", "for example", "as exemplified by", "furthermore", "notably", "as follows", "such as", "for instance", "especially", "particularly", "similarly", "to be specific", "in particular", "moreover", "just as important", "mainly", "including", "namely", "to illustrate"], "result": ["in that case", "hence", "otherwise", "as a result of", "for this reason", "evidence illustrates that", "that implies", "the result is/results are", "thus", "consequently", "so", "accordingly", "since", "resulting from", "as a consequence", "the consequence is", "therefore", "as a result", "this suggests that", "it follows that", "because of this", "it can be seen", "owing to x"], "purpose": ["for this reason", "with this in mind", "for this purpose"], "comparison": ["in the same manner", "similarly", "as so", "like"], "contrast": ["by contrast", "on the other hand", "otherwise", "although", "but", "yet", "in comparison", "nevertheless", "still", "nonetheless", "in contrast", "conversely", "however", "whereas", "alternatively", "on the contrary", "in spite of this", "or", "contrasting", "and yet", "instead", "in fact", "actually"], "summary": ["in summary", "as i have said", "hence", "altogether", "in conclusion", "briefly", "overall", "to repeat", "finally", "on the whole", "therefore", "as seen", "to sum up", "thus to summarise", "to conclude", "in short"], "rephrasing": ["in contrast", "better", "or", "in view of this", "rather", "in other terms"]}
\ No newline at end of file
diff --git a/.ipynb_checkpoints/conversions-all-danger-checkpoint.sh b/.ipynb_checkpoints/conversions-all-danger-checkpoint.sh
deleted file mode 100644
index d5a413072aff47d9b5f67cf3ccb6dfc111017b02..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/conversions-all-danger-checkpoint.sh
+++ /dev/null
@@ -1,18 +0,0 @@
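-# Apply the leftover-label substitutions to every dev/test results folder.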
-python convert-leftover-labels.py results/dev/pytorch-1
-python convert-leftover-labels.py results/dev/pytorch-2
-python convert-leftover-labels.py results/dev/pytorch-3
-python convert-leftover-labels.py results/dev/pytorch-4
-python convert-leftover-labels.py results/dev/pytorch-5
-python convert-leftover-labels.py results/dev/pytorch-6
-python convert-leftover-labels.py results/dev/A_15-epochs_frozen-1_3
-python convert-leftover-labels.py results/dev/A_15-epochs_frozen-1-2-3_4
-
-
-python convert-leftover-labels.py results/test/pytorch1
-python convert-leftover-labels.py results/test/pytorch2
-python convert-leftover-labels.py results/test/pytorch3
-python convert-leftover-labels.py results/test/pytorch4
-python convert-leftover-labels.py results/test/pytorch5
-python convert-leftover-labels.py results/test/pytorch6
-python convert-leftover-labels.py results/test/A_15-epochs_frozen-1_3
-python convert-leftover-labels.py results/test/A_15-epochs_frozen-1-2-3_4
\ No newline at end of file
diff --git a/.ipynb_checkpoints/convert-leftover-labels-checkpoint.py b/.ipynb_checkpoints/convert-leftover-labels-checkpoint.py
deleted file mode 100644
index e612f98d757b3d487ed47d0405f9f23de6d66ead..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/convert-leftover-labels-checkpoint.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# an emergency script to replace leftover (unmapped) labels in the result files,
-# using the substitutions table
-
-import os
-import sys
-
-print(sys.argv[1])
-# open substitutions
-
-subs = {}
-
-with open('mappings/substitutions.txt', 'r') as f:
-    next(f)
-    for line in f:
-        l = line.strip().split('\t')
-        if not l[1] in subs:
-            subs[l[1]] = {}
-        # corpus: {bad-label: good-label}
-        subs[l[1]][l[2]] = l[0]
-        
-# open target folder
-
-files = [x for x in os.listdir(str(sys.argv[1])) if x.endswith('.tsv')]
-
-for file in files:
-    corpus = file[:-4]
-    
-    og_lines = []
-    with open(sys.argv[1] + '/' + file, 'r', encoding = 'utf-8') as f:
-        next(f)
-        for line in f:
-            if not line.strip() == '':
-                l = line.strip().split('\t')
-                if l[-1] in subs[corpus]:
-    #                 print(l)
-                    line = '\t'.join(l[:-1] + [subs[corpus][l[-1]]])
-    #                 print(line)
-    #                 print()
-                og_lines.append(line)
-            
-    # delete file
-    os.remove(sys.argv[1] + '/' + file)
-    
-    with open(sys.argv[1] + '/' + file, 'w+', encoding='utf-8') as nf:
-        nf.write('doc	unit1_toks	unit2_toks	unit1_txt	unit2_txt	s1_toks	s2_toks	unit1_sent	unit2_sent	dir	orig_label	predicted_label\n')
-        for line in og_lines:
-            nf.write(line.strip())
-            nf.write('\n')
\ No newline at end of file
diff --git a/.ipynb_checkpoints/environment-checkpoint.yml b/.ipynb_checkpoints/environment-checkpoint.yml
deleted file mode 100644
index bcc7776ff1b87b6e69601ce6ecaf1a138f07b1bb..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/environment-checkpoint.yml
+++ /dev/null
@@ -1,88 +0,0 @@
-name: discret
-channels:
-  - defaults
-dependencies:
-  - _libgcc_mutex=0.1=main
-  - _openmp_mutex=5.1=1_gnu
-  - ca-certificates=2023.01.10=h06a4308_0
-  - ld_impl_linux-64=2.38=h1181459_1
-  - libffi=3.4.4=h6a678d5_0
-  - libgcc-ng=11.2.0=h1234567_1
-  - libgomp=11.2.0=h1234567_1
-  - libstdcxx-ng=11.2.0=h1234567_1
-  - ncurses=6.4=h6a678d5_0
-  - openssl=1.1.1t=h7f8727e_0
-  - pip=23.0.1=py38h06a4308_0
-  - python=3.8.16=h7a1cb2a_3
-  - readline=8.2=h5eee18b_0
-  - setuptools=66.0.0=py38h06a4308_0
-  - sqlite=3.41.2=h5eee18b_0
-  - tk=8.6.12=h1ccaba5_0
-  - wheel=0.38.4=py38h06a4308_0
-  - xz=5.4.2=h5eee18b_0
-  - zlib=1.2.13=h5eee18b_0
-  - pip:
-    - adapter-transformers==3.0.1
-    - aiohttp==3.8.4
-    - aiosignal==1.3.1
-    - async-timeout==4.0.2
-    - attrs==23.1.0
-    - certifi==2023.5.7
-    - charset-normalizer==3.1.0
-    - click==8.1.3
-    - cmake==3.26.3
-    - datasets==2.4.0
-    - dill==0.3.5.1
-    - filelock==3.12.0
-    - frozenlist==1.3.3
-    - fsspec==2023.5.0
-    - huggingface-hub==0.14.1
-    - idna==3.4
-    - jinja2==3.1.2
-    - joblib==1.2.0
-    - lit==16.0.3
-    - markupsafe==2.1.2
-    - mpmath==1.3.0
-    - multidict==6.0.4
-    - multiprocess==0.70.13
-    - networkx==3.1
-    - numpy==1.24.3
-    - nvidia-cublas-cu11==11.10.3.66
-    - nvidia-cuda-cupti-cu11==11.7.101
-    - nvidia-cuda-nvrtc-cu11==11.7.99
-    - nvidia-cuda-runtime-cu11==11.7.99
-    - nvidia-cudnn-cu11==8.5.0.96
-    - nvidia-cufft-cu11==10.9.0.58
-    - nvidia-curand-cu11==10.2.10.91
-    - nvidia-cusolver-cu11==11.4.0.1
-    - nvidia-cusparse-cu11==11.7.4.91
-    - nvidia-nccl-cu11==2.14.3
-    - nvidia-nvtx-cu11==11.7.91
-    - packaging==23.1
-    - pandas==2.0.1
-    - pillow==9.5.0
-    - pyarrow==12.0.0
-    - python-dateutil==2.8.2
-    - pytz==2023.3
-    - pyyaml==6.0
-    - regex==2023.5.5
-    - requests==2.30.0
-    - responses==0.18.0
-    - sacremoses==0.0.53
-    - scikit-learn==1.2.2
-    - scipy==1.10.1
-    - six==1.16.0
-    - sympy==1.12
-    - threadpoolctl==3.1.0
-    - tokenizers==0.12.1
-    - torch==2.0.1
-    - torchaudio==2.0.2
-    - torchvision==0.15.2
-    - tqdm==4.65.0
-    - transformers==4.18.0
-    - triton==2.0.0
-    - typing-extensions==4.5.0
-    - tzdata==2023.3
-    - urllib3==2.0.2
-    - xxhash==3.2.0
-    - yarl==1.9.2
\ No newline at end of file
diff --git a/.ipynb_checkpoints/huggingface_classifier-checkpoint.py b/.ipynb_checkpoints/huggingface_classifier-checkpoint.py
deleted file mode 100644
index 7d90d9acd4232a9bb52a2ffdeb346f5596bc452e..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/huggingface_classifier-checkpoint.py
+++ /dev/null
@@ -1,203 +0,0 @@
-#!/usr/bin/env python
-# coding: utf-8
-
-import torch
-import numpy as np
-from transformers import AutoModel, AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, AutoConfig, TrainingArguments, Trainer, EvalPrediction, set_seed
-from torch import nn
-from torch.optim import AdamW
-from torch.utils.data import DataLoader
-import torch.nn.functional as F
-from torch.autograd import Variable
-from tqdm import tqdm
-import os
-from time import sleep
-from datetime import datetime
-import sys
-from sklearn.metrics import classification_report, accuracy_score
-from utils import open_file
-import pandas as pd
-import datasets
-from configure import parse_args
-from utils import *
-
-args = parse_args()
-now = datetime.now()
-dt_string = now.strftime("%d.%m.%y-%H:%M:%S")
-save_name = args.mappings_file.split('-')[-1]
-substitutions_file = 'mappings/substitutions.txt'
-tokenizer = AutoTokenizer.from_pretrained(args.transformer_model)
-
-
-set_seed(42)
-
-print('Model:', args.transformer_model)
-print('Batch size:', args.batch_size * args.gradient_accumulation_steps)
-print('Num epochs:', args.num_epochs)
-
-# Open mappings
-mappings, inv_mappings = open_mappings(args.mappings_file)
-
-# Open sentences
-train_sentences, dev_dict_sentences, test_dict_sentences = open_sentences(args.data_path, mappings)
-
-# make pandas dataframes
-file_header = ['text', 'labels']
-
-train_df = pd.DataFrame([[' '.join(x[-2]), x[-1]] for x in train_sentences], 
-                        columns =file_header)
-train_df = train_df.sample(frac = 1) # shuffle the train
-
-dev_dict_df = {corpus : pd.DataFrame([[' '.join(x[-2]), x[-1]] 
-                                      for x in sents], 
-                                     columns = file_header)
-               for corpus, sents in dev_dict_sentences.items()}
-
-test_dict_df = {corpus : pd.DataFrame([[' '.join(x[-2]), x[-1]] 
-                                      for x in sents], 
-                                     columns = file_header)
-               for corpus, sents in test_dict_sentences.items()}
-
-#Make datasets from dataframes
-train_dataset = datasets.Dataset.from_pandas(train_df)
-dev_dict_dataset  = {corpus:datasets.Dataset.from_pandas(dev_df) 
-                     for corpus, dev_df in dev_dict_df.items()}
-test_dict_dataset = {corpus: datasets.Dataset.from_pandas(test_df) 
-                     for corpus, test_df in test_dict_df.items()}
-
-# get number of labels
-num_labels = train_df['labels'].nunique() + 1
-
-# Encode the data
-train_dataset = train_dataset.map(encode_batch, batched=True)
-train_dataset.set_format(type="torch", columns=["input_ids", "attention_mask", "labels"])
-
-encoded_dev_dataset = {}
-for corpus in dev_dict_dataset:
-    temp = dev_dict_dataset[corpus].map(encode_batch, batched=True)
-    temp.set_format(type="torch", columns=["input_ids", "attention_mask", "labels"])
-    encoded_dev_dataset[corpus] = temp
-
-encoded_test_dataset = {}
-for corpus in test_dict_dataset:
-    temp = test_dict_dataset[corpus].map(encode_batch, batched=True)
-    temp.set_format(type="torch", columns=["input_ids", "attention_mask", "labels"])
-    encoded_test_dataset[corpus] = temp
-
-# ===============================
-# Training params
-# ===============================
-
-model = AutoModelForSequenceClassification.from_pretrained(args.transformer_model, num_labels=num_labels)
-
-
-training_args = TrainingArguments(
-    learning_rate    = 2e-5, #1e-4,
-    num_train_epochs = args.num_epochs,
-    per_device_train_batch_size = args.batch_size,
-    per_device_eval_batch_size  = args.batch_size,
-    gradient_accumulation_steps = args.gradient_accumulation_steps,
-    logging_steps  = len(train_sentences) // (args.batch_size * args.gradient_accumulation_steps),  # log roughly once per epoch
-    output_dir = "./training_output",
-    overwrite_output_dir =True,
-    remove_unused_columns=False,
-)
-
-
-trainer = Trainer(
-    model = model,
-    args  = training_args,
-    train_dataset = train_dataset
-)
-
-# Freeze layers in the classifier if desired
-if args.freeze_layers != '':
-    layers_to_freeze = args.freeze_layers.split(';')
-    for name, param in model.named_parameters():
-        if any(x in name for x in layers_to_freeze):
-            param.requires_grad = False
-
-
-# ===============================
-# Start the training 🚀
-# ===============================
-
-print('Start training...')
-trainer.train()
-
-# Dev results
-
-print('\nDev results:')
-for corpus in encoded_dev_dataset:
-    print()
-    dev_results = get_predictions_huggingface(trainer, corpus, 
-                                    encoded_dev_dataset[corpus])
-    
-    
-    path_results = 'results/dev/' + save_name + '_' + str(args.num_epochs)
-    if not os.path.exists(path_results):
-        os.makedirs(path_results)
-                
-    print_results_to_file(corpus, 
-                          dev_dict_sentences[corpus], 
-                          dev_results,
-                          inv_mappings, 
-                          #substitutions_file, 
-                          path_results)
-
-# Test results
-
-print('\nTest results:')
-for corpus in encoded_test_dataset:
-    print()
-    test_results = get_predictions_huggingface(trainer, 
-                                               corpus, 
-                                               encoded_test_dataset[corpus])
-    
-    
-    path_results = 'results/test/' + save_name + '_' + str(args.num_epochs)
-    if not os.path.exists(path_results):
-        os.makedirs(path_results)
-                
-    print_results_to_file(corpus, 
-                          test_dict_sentences[corpus], 
-                          test_results,
-                          inv_mappings, 
-                          substitutions_file, 
-                          path_results)
-
-
-
-#         for corpus in test_dict_dataloader:
-#             test_results = get_predictions(model, 
-#                                 corpus, 
-#                                 test_dict_dataloader[corpus])
-            
-#             path_results = 'results/test/pytorch' + str(epoch_num+1)
-#             if not os.path.exists(path_results):
-#                 os.makedirs(path_results)
-                
-#             print_results_to_file(corpus, 
-#                                 test_dict_sentences[corpus], 
-#                                 test_results,
-#                                 inv_mappings, substitutions_file, 
-#                                 path_results)    
-    
-    
-    
-    
-    
-    
-
-# Save specific test results
-
-# print('\nTest results:')
-# for corpus in encoded_test_dataset:
-#     print()
-#     test_results = get_predictions_huggingface(trainer, corpus, 
-#                                     encoded_test_dataset[corpus])
-# 
-#     print_results_to_file(corpus, test_dict_sentences[corpus], test_results, 
-#                           inv_mappings, substitutions_file)
\ No newline at end of file
diff --git a/.ipynb_checkpoints/make_adapter-checkpoint.py b/.ipynb_checkpoints/make_adapter-checkpoint.py
deleted file mode 100644
index d51875ca89cfdbd52730e8248f4a7fe5add3d24d..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/make_adapter-checkpoint.py
+++ /dev/null
@@ -1,121 +0,0 @@
-#!/usr/bin/env python
-# coding: utf-8
-
-import os
-import numpy as np
-from datetime import datetime
-import pandas as pd
-import torch
-from transformers import AutoModel, AutoTokenizer, AutoModelWithHeads, AutoConfig, TrainingArguments, AdapterTrainer, EvalPrediction, set_seed
-import datasets
-from configure import parse_args
-from sklearn.metrics import accuracy_score
-from utils import *
-
-# parameters
-args = parse_args()
-tokenizer = AutoTokenizer.from_pretrained(args.transformer_model)
-layers_to_freeze = args.freeze_layers.split(';')
-set_seed(42)
-batch_size = args.batch_size
-
-# Set name for adapter
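-# e.g. --num_epochs 15 with --freeze_layers 'layer.1;layer.2;layer.3' gives 'A_15-epochs_frozen-1-2-3'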
-adapter_name = 'A_' + str(args.num_epochs) + '-epochs_frozen' + args.freeze_layers.replace('layer.', '-').replace(';', '')
-
-print('Create classifier adapter\n')
-print('Name:', adapter_name)
-print('Model:', args.transformer_model)
-print('Batch size:', args.batch_size * args.gradient_accumulation_steps)
-print('Frozen layers:',  args.freeze_layers.replace(';', ', '))
-
-# Open mappings
-mappings, inv_mappings = open_mappings(args.mappings_file)
-
-# Open sentences
-train_sentences, dev_dict_sentences, _ = open_sentences(args.data_path, mappings)
-
-# make pandas dataframes
-file_header = ['text', 'labels']
-train_df = pd.DataFrame([[' '.join(x[-2]), x[-1]] for x in train_sentences], columns=file_header)
-train_df = train_df.sample(frac = 1) # shuffle the train
-# get a global dev accuracy, we will not be directly using these results
-dev_df = pd.DataFrame([[' '.join(x[-2]), x[-1]] 
-                       for sents in dev_dict_sentences.values()
-                       for x in sents ], columns=file_header)
-
-#Make datasets from dataframes
-train_dataset = datasets.Dataset.from_pandas(train_df)
-dev_dataset = datasets.Dataset.from_pandas(dev_df)
-
-# get number of labels
-num_labels = train_df['labels'].nunique() + 1
-
-# Encode the data
-train_dataset = train_dataset.map(encode_batch, batched=True)
-train_dataset.set_format(type="torch", columns=["input_ids", "attention_mask", "labels"])
-
-dev_dataset = dev_dataset.map(encode_batch, batched=True)
-dev_dataset.set_format(type="torch", columns=["input_ids", "attention_mask", "labels"])
-
-
-# Training
-
-config = AutoConfig.from_pretrained(
-    args.transformer_model,
-    num_labels=num_labels,
-)
-model = AutoModelWithHeads.from_pretrained(
-    args.transformer_model,
-    config=config,
-)
-
-# Add a new adapter
-model.add_adapter(adapter_name)
-# Add a matching classification head
-model.add_classification_head(
-    adapter_name,
-    num_labels=num_labels,
-    id2label=inv_mappings
-  )
-
-# Activate the adapter
-print('Initialize adapter...')
-model.train_adapter(adapter_name)
-
-training_args = TrainingArguments(
-    learning_rate    = 1e-4,
-    num_train_epochs = args.num_epochs,
-    per_device_train_batch_size = args.batch_size,
-    per_device_eval_batch_size  = args.batch_size,
-    gradient_accumulation_steps = args.gradient_accumulation_steps,
-    logging_steps  = len(train_sentences) // (args.batch_size * args.gradient_accumulation_steps),  # log roughly once per epoch
-    output_dir = "./training_output",
-    overwrite_output_dir =True,
-    remove_unused_columns=False,
-)
-
-trainer = AdapterTrainer(
-    model=model,
-    args=training_args,
-    train_dataset=train_dataset,
-)
-
-# freeze layers
-if args.freeze_layers != '':
-    for name, param in model.named_parameters():
-        if any(x in name for x in layers_to_freeze):
-            param.requires_grad = False
-
-# Start the training 🚀
-print('\nStart training...\n')
-trainer.train()
-
-# Save adapter to load for the finetuned model
-model.save_adapter(adapter_name, adapter_name)
-
-# Perform evaluation
-predictions = trainer.predict(dev_dataset)
-preds = np.argmax(predictions.predictions, axis=1)
-golds = predictions.label_ids
-print('Dev accuracy:', round(accuracy_score(golds, preds), 4))
\ No newline at end of file
diff --git a/.ipynb_checkpoints/pytorch_classifier-checkpoint.py b/.ipynb_checkpoints/pytorch_classifier-checkpoint.py
deleted file mode 100644
index 33cfa6170ba1e7841d3d93203056f88af13e2148..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/pytorch_classifier-checkpoint.py
+++ /dev/null
@@ -1,291 +0,0 @@
-#!/usr/bin/env python
-# coding: utf-8
-
-import torch
-import numpy as np
-from transformers import AutoModel, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
-from torch import nn
-from torch.optim import AdamW
-from torch.utils.data import DataLoader
-import torch.nn.functional as F
-from torch.autograd import Variable
-from tqdm import tqdm
-import os
-from time import sleep
-from datetime import datetime
-import sys
-from sklearn.metrics import classification_report, accuracy_score
-from configure import parse_args
-from utils import *
-
-args = parse_args()
-now = datetime.now()
-dt_string = now.strftime("%d.%m.%y-%H:%M:%S")
-layers_to_freeze = args.freeze_layers.split(";")
-substitutions_file = 'mappings/substitutions.txt'
-mapping_classes = args.mappings_file[:-4].split('-')[-1]
-# specific_results = open_specific_results('mappings/specific_results.txt')['B']
-set_seed(42)
-
-# ===============
-# Dataset class
-# ===============
-
-class Dataset(torch.utils.data.Dataset):
-
-    def __init__(self, sentences):
-
-        self.labels = [sent[-1] for sent in sentences]
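-        # pre-tokenize every example: inputs are already split into words,
-        # pad/truncate each sequence to 512 tokens and return PyTorch tensors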
-        self.texts = [tokenizer(sent[-2], 
-                                is_split_into_words=True,                              
-                                padding='max_length', 
-                                max_length = 512, 
-                                truncation=True,
-                                return_tensors="pt") 
-                                for sent in sentences]
-
-    def classes(self):
-        return self.labels
-
-    def __len__(self):
-        return len(self.labels)
-    
-    def get_batch_labels(self, idx):
-        # Fetch a batch of labels
-        return np.array(self.labels[idx])
-
-    def get_batch_texts(self, idx):
-        # Fetch a batch of inputs
-        return self.texts[idx]
-
-    def __getitem__(self, idx):
-
-        batch_texts = self.get_batch_texts(idx)
-        batch_y = self.get_batch_labels(idx)
-
-        return batch_texts, batch_y
-
-# ===============
-# Load datasets
-# ===============
-
-# Open mappings
-mappings, inv_mappings = open_mappings(args.mappings_file)
-batch_size = args.batch_size
-tokenizer  = AutoTokenizer.from_pretrained(args.transformer_model)
-
-train_sentences, dev_dict_sentences, test_dict_sentences = open_sentences(args.data_path, mappings)
-
-# Determine linear size (= number of classes in the sets + 1)
-num_labels = len(set(sent[-1] for sent in train_sentences)) + 1
-
-# make train/dev datasets
-train_dataset = Dataset(train_sentences)
-dev_dataset   = {corpus: Dataset(s) for corpus, s in dev_dict_sentences.items()}
-test_dataset  = {corpus: Dataset(s) for corpus, s in test_dict_sentences.items()}
-
-# Make datasets with batches and dataloaders
-train_dataloader = DataLoader(train_dataset, batch_size, shuffle=True)
-dev_dict_dataloader = {corpus: DataLoader(dev_data, batch_size) 
-                        for corpus, dev_data in dev_dataset.items()}
-test_dict_dataloader = {corpus: DataLoader(test_data, batch_size) 
-                        for corpus, test_data in test_dataset.items()}
-
-
-# ===============
-# Model setup
-# ===============
-
-class TransformerClassifier(nn.Module):
-
-    def __init__(self, dropout=args.dropout):
-
-        super(TransformerClassifier, self).__init__()
-
-        self.tr_model = AutoModel.from_pretrained(args.transformer_model)
-        self.dropout = nn.Dropout(dropout)
-        self.linear = nn.Linear(768, num_labels) # bert input x num of classes
-        self.relu = nn.ReLU()
-
-    def forward(self, input_id, mask):
-        
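-        # run the transformer and use the hidden state of the first ([CLS]) token
-        # as the sequence representation for classification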
-        outputs = self.tr_model(input_ids = input_id, 
-                                attention_mask = mask,
-                                return_dict = True)['last_hidden_state'][:, 0, :]
-        dropout_output = self.dropout(outputs)
-        linear_output = self.linear(dropout_output)
-        final_layer = self.relu(linear_output)
-
-        return final_layer
-
-
-model = TransformerClassifier()
-
-
-def train(model, 
-          train_dataloader, 
-          dev_dict_dataloader, 
-          test_dict_sentences, 
-          test_dict_dataloader,
-          epochs, 
-          #specific_results
-         ):
-
-    use_cuda = args.use_cuda.lower() == 'yes'
-    device = torch.device("cuda" if use_cuda else "cpu")
-
-    criterion = nn.CrossEntropyLoss()
-    optimizer = AdamW(model.parameters(), #Adam
-                      lr = 2e-5, #1e-6
-                      eps = 1e-8
-                    )
-
-    if use_cuda:
-        model = model.cuda()
-        criterion = criterion.cuda()
-    
-    gradient_accumulation_steps = args.gradient_accumulation_steps
-    total_steps = len(train_dataloader) * epochs
-    scheduler = get_linear_schedule_with_warmup(optimizer, 
-                                                num_warmup_steps = 0,
-                                                num_training_steps = total_steps)
-    
-    seed_val = 42
-    torch.manual_seed(seed_val)
-    torch.cuda.manual_seed_all(seed_val)
-    
-    # freeze layers, see argument in configure.py
-    if args.freeze_layers != '':
-        for name, param in model.named_parameters():
-            if any(x in name for x in layers_to_freeze):
-                param.requires_grad = False
-
-    for epoch_num in range(0, epochs):
-        print('\n=== Epoch {:} / {:} ==='.format(epoch_num + 1, epochs))
-        
-        model.train()
-
-        total_acc_train = 0
-        total_loss_train = 0
-        batch_counter = 0
-        
-#         for train_input, train_label in tqdm(train_dataloader):
-        for train_input, train_label in train_dataloader:
-            batch_counter += 1
-            train_label = train_label.to(device)
-            mask = train_input['attention_mask'].to(device)
-            input_id = train_input['input_ids'].squeeze(1).to(device)
-
-            output = model(input_id, mask)
-                
-#             batch_loss = criterion(output, train_label.long())
-#             total_loss_train += batch_loss.item()
-                
-#             acc = (output.argmax(dim=1) == train_label).sum().item()
-#             total_acc_train += acc
-            
-            # Compute Loss and Perform Back-propagation
-            loss = criterion(output, train_label.long())
-
-
-            # Normalize the Gradients
-            loss = loss / gradient_accumulation_steps
-            loss.backward()
-
-            
-            # Update the weights once every gradient_accumulation_steps batches
-            if batch_counter % gradient_accumulation_steps == 0:
-                # Clip the accumulated gradients, then update the weights,
-                # advance the learning-rate schedule and reset the gradients
-                torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)
-                optimizer.step()
-                scheduler.step()
-                optimizer.zero_grad()
-                model.zero_grad()
-            
-        # ------ Validation --------
-        
-        print('\nValidation for epoch:', epoch_num + 1)
-        
-        # Dev results for each corpus; predictions are written to a per-epoch folder.
-        for corpus in dev_dict_dataloader:
-            dev_results = get_predictions(model, 
-                                corpus, 
-                                dev_dict_dataloader[corpus])
-            
-            path_results = 'results/dev/pytorch_' + mapping_classes + '_' + str(epoch_num+1)
-            if not os.path.exists(path_results):
-                os.makedirs(path_results)
-                
-            print_results_to_file(corpus, 
-                                dev_dict_sentences[corpus], 
-                                dev_results,
-                                inv_mappings, #substitutions_file, 
-                                path_results)
-            
-        # ------ Test --------
-        
-        print('\nTest results for epoch:', epoch_num + 1)
-        
-        for corpus in test_dict_dataloader:
-            test_results = get_predictions(model, 
-                                corpus, 
-                                test_dict_dataloader[corpus])
-            
-            path_results = 'results/test/pytorch_' + mapping_classes + '_' + str(epoch_num+1)
-            if not os.path.exists(path_results):
-                os.makedirs(path_results)
-                
-            print_results_to_file(corpus, 
-                                test_dict_sentences[corpus], 
-                                test_results,
-                                inv_mappings, #substitutions_file, 
-                                path_results)
-            
-            
-#         # we want the results of specific epochs for specific corpora. 
-#         # we define the epochs and the corpora and we save only these results.
-        
-#         if epoch_num+1 in specific_results:
-#             for corpus in specific_results[epoch_num+1]:
-#                 test_results = get_predictions(model, 
-#                                                corpus, 
-#                                                test_dict_dataloader[corpus], 
-#                                                print_results=False)
-
-
-# ------- Start the training -------   
-
-print('\nModel: ', args.transformer_model)
-print('Effective batch size: ', args.batch_size * args.gradient_accumulation_steps)
-print('\nStart training...\n')
-train(model, 
-      train_dataloader,
-      dev_dict_dataloader, 
-      test_dict_sentences, 
-      test_dict_dataloader,
-      args.num_epochs, 
-#       specific_results
-     )
-print('\nTraining Done!')
-
-
-# ------- Testing ---------
-
-# print('Testing...')
-# for corpus in test_dict_dataloader:
-#     test_results = get_predictions(model, 
-#                                    corpus, 
-#                                    test_dict_dataloader[corpus]
-#                                   )
-#     print_results_to_file(corpus, 
-#                           test_dict_sentences[corpus], 
-#                           test_results,
-#                           inv_mappings, 
-#                           substitutions_file)
\ No newline at end of file
diff --git a/.ipynb_checkpoints/requirements-checkpoint.txt b/.ipynb_checkpoints/requirements-checkpoint.txt
deleted file mode 100644
index 396a0f04275209d4371e47b3b7fc448f1de217b3..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/requirements-checkpoint.txt
+++ /dev/null
@@ -1,44 +0,0 @@
-adapter-transformers==3.0.1
-certifi==2023.5.7
-charset-normalizer
-cmake==3.26.3
-datasets==2.4.0
-fsspec
-huggingface-hub==0.14.1
-idna==3.4
-Jinja2==3.1.2
-joblib==1.2.0
-lit==16.0.3
-MarkupSafe==2.1.2
-mpmath==1.3.0
-multidict==6.0.4
-multiprocess==0.70.13
-networkx==3.1
-packaging==23.1
-pandas==2.0.1
-Pillow==9.5.0
-pyarrow==12.0.0
-python-dateutil==2.8.2
-pytz==2023.3
-PyYAML==6.0
-regex==2023.5.5
-requests==2.30.0
-responses==0.18.0
-sacremoses==0.0.53
-scikit-learn==1.2.2
-scipy==1.10.1
-six==1.16.0
-sympy==1.12
-threadpoolctl==3.1.0
-tokenizers==0.12.1
-torch==2.0.1
-torchaudio==2.0.2
-torchvision
-tqdm==4.65.0
-transformers==4.18.0
-triton==2.0.0
-typing_extensions==4.5.0
-tzdata==2023.3
-urllib3==2.0.2
-xxhash==3.2.0
-yarl==1.9.2
\ No newline at end of file
diff --git a/.ipynb_checkpoints/run_stuff-checkpoint.sh b/.ipynb_checkpoints/run_stuff-checkpoint.sh
deleted file mode 100644
index d08fa1b2ea48c6cc2e5b24f371a3c35c525b47f4..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/run_stuff-checkpoint.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env bash
-
-#SBATCH --job-name=adapters
-
-#SBATCH --ntasks=1
-#SBATCH --cpus-per-task=4
-#SBATCH --partition=RTX6000Node
-#SBATCH --gres=gpu:1
-
-
-# tests tests
-
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3 pytorch_classifier.py --batch_size 8 --num_epochs 10 --data_path '/users/melodi/emetheni/clean_data' --mappings_file 'mappings/mappings-classes-braud.tsv'
-
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3 huggingface_classifier.py --batch_size 4 --gradient_accumulation_steps 32 --num_epochs 1 --data_path '/users/melodi/emetheni/clean_data' --mappings_file 'mappings/mappings-classes-braud.tsv'
-
-# Train the adapter:
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3 make_adapter.py --batch_size 8 --num_epochs 15 --data_path '/users/melodi/emetheni/sharedtask2023/data' --freeze_layers 'layer.1;layer.2;layer.3' --mappings_file 'mappings/mappings-classes-braud.tsv'
-
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3 make_adapter.py --batch_size 8 --num_epochs 15 --data_path '/users/melodi/emetheni/sharedtask2023/data' --freeze_layers 'layer.1;layer.2;layer.3;layer.4' --mappings_file 'mappings/mappings-classes-braud.tsv'
-
-srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3 make_adapter.py --batch_size 8 --num_epochs 15 --data_path '/users/melodi/emetheni/sharedtask2023/data' --freeze_layers 'layer.1' --mappings_file 'mappings/mappings-classes-braud.tsv'
-
-# Run classifier with adapter for corpora:
-
-
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3 adapter_classifier.py --batch_size 8 --num_epochs 1 --data_path '/users/melodi/emetheni/clean_data' --adapter_name 'A_15-epochs_frozen-1-2-3'
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3 adapter_classifier.py --batch_size 8 --num_epochs 2 --data_path '/users/melodi/emetheni/clean_data' --adapter_name 'A_15-epochs_frozen-1-2-3'
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3 adapter_classifier.py --batch_size 8 --num_epochs 3 --data_path '/users/melodi/emetheni/clean_data' --adapter_name 'A_15-epochs_frozen-1-2-3'
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3 adapter_classifier.py --batch_size 8 --num_epochs 4 --data_path '/users/melodi/emetheni/clean_data' --adapter_name 'A_15-epochs_frozen-1-2-3'
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3 adapter_classifier.py --batch_size 8 --num_epochs 5 --data_path '/users/melodi/emetheni/clean_data' --adapter_name 'A_15-epochs_frozen-1-2-3'
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3 adapter_classifier.py --batch_size 8 --num_epochs 6 --data_path '/users/melodi/emetheni/clean_data' --adapter_name 'A_15-epochs_frozen-1-2-3'
-
-
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3  adapter_classifier.py --batch_size 8 --num_epochs 1 --data_path '/users/melodi/emetheni/clean_data' --adapter_name 'A_15-epochs_frozen-1'
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3  adapter_classifier.py --batch_size 8 --num_epochs 2 --data_path '/users/melodi/emetheni/clean_data' --adapter_name 'A_15-epochs_frozen-1'
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3  adapter_classifier.py --batch_size 8 --num_epochs 3 --data_path '/users/melodi/emetheni/clean_data' --adapter_name 'A_15-epochs_frozen-1'
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3  adapter_classifier.py --batch_size 8 --num_epochs 4 --data_path '/users/melodi/emetheni/clean_data' --adapter_name 'A_15-epochs_frozen-1'
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3  adapter_classifier.py --batch_size 8 --num_epochs 5 --data_path '/users/melodi/emetheni/clean_data' --adapter_name 'A_15-epochs_frozen-1'
-# srun singularity exec /logiciels/containerCollections/CUDA10/pytorch.sif python3  adapter_classifier.py --batch_size 8 --num_epochs 6 --data_path '/users/melodi/emetheni/clean_data' --adapter_name 'A_15-epochs_frozen-1'
diff --git a/.ipynb_checkpoints/see_results-checkpoint.ipynb b/.ipynb_checkpoints/see_results-checkpoint.ipynb
deleted file mode 100644
index b7d64b6e1ee293ac5c084eacad8d11a64fc4224e..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/see_results-checkpoint.ipynb
+++ /dev/null
@@ -1,480 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from sklearn.metrics import accuracy_score\n",
-    "import os, io\n",
-    "from collections import OrderedDict, Counter"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "connectives = {\"elaboration\": [\"and\", \"also\", \"besides\", \"further\", \"furthermore\", \"too\", \"moreover\", \"in addition\", \"then\", \"of equal importance\", \"equally important\", \"another\", \"additionally\", \"also\", \"moreover\", \"furthermore\", \"again\", \"further\", \"then\", \"besides\", \"too\", \"similarly\", \"correspondingly\", \"indeed\", \"regarding\"], \n",
-    "\"time\": [\"next\", \"afterward\", \"finally\", \"later\", \"last\", \"lastly\", \"at last\", \"now\", \"subsequently\", \"then\", \"when\", \"soon\", \"thereafter\", \"after a short time\", \"the next week\", \"a minute later\", \"in the meantime\", \"meanwhile\", \"on the following day\", \"at length\", \"ultimately\", \"presently\"], \n",
-    "\"sequence\": [\"first\", \"second\", \"third\", \"finally\", \"hence\", \"next\", \"then\", \"from here on\", \"to begin with\", \"last of all\", \"after\", \"before\", \"as soon as\", \"in the end\", \"gradually\", \"when\", \"after\", \"after that\", \"afterwards\", \"next\", \"subsequently\", \"later (on)\", \"followed by\", \"to go on to\", \"finally\", \"another\", \"additionally\", \"finally moreover\", \"also\", \"subsequently\", \"eventually\", \"next\", \"then\"], \n",
-    "\"example\": [\"for example\", \"to illustrate\", \"for instance\", \"to be specific\", \"such as\", \"moreover\", \"furthermore\", \"just as important\", \"similarly\", \"in the same way\", \"for example\", \"for instance\", \"namely\", \"such as\", \"as follows\", \"as exemplified by\", \"such as\", \"including\", \"especially\", \"particularly\", \"in particular\", \"notably\", \"mainly\"], \n",
-    "\"result\": [\"as a result\", \"hence\", \"so\", \"accordingly\", \"as a consequence\", \"consequently\", \"thus\", \"since\", \"therefore\", \"for this reason\", \"because of this\", \"therefore\", \"accordingly\", \"as a result of\", \"the result is/results are\", \"the consequence is\", \"resulting from\", \"consequently\", \"it can be seen\", \"evidence illustrates that\", \"because of this\", \"thus\", \"hence\", \"for this reason\", \"owing to x\", \"this suggests that\", \"it follows that\", \"otherwise\", \"in that case\", \"that implies\", \"As a result\", \"therefore\", \"thus\"], \n",
-    "\"purpose\": [\"for this purpose\", \"with this in mind\", \"for this reason\"], \n",
-    "\"comparison\": [\"like\", \"in the same manner\", \"as so\", \"similarly\"], \n",
-    "\"contrast\": [\"but\", \"in contrast\", \"conversely\", \"however\", \"still\", \"nevertheless\", \"nonetheless\", \"yet\", \"and yet\", \"on the other hand\", \"on the contrary\", \"or\", \"in spite of this\", \"actually\", \"in fact\", \"whereas\", \"conversely\", \"in comparison\", \"by contrast\", \"in contrast\", \"contrasting\", \"alternatively\", \"although\", \"otherwise\", \"instead\"], \n",
-    "\"summary\": [\"in summary\", \"to sum up\", \"to repeat\", \"briefly\", \"in short\", \"finally\", \"on the whole\", \"therefore\", \"as I have said\", \"in conclusion\", \"as seen\", \"in conclusion\", \"therefore\", \"to conclude\", \"on the whole\", \"hence\", \"thus to summarise\", \"altogether\", \"overall\"], \n",
-    "\"rephrasing\": [\"in other terms\", \"rather\", \"or\", \"better\", \"in view of this\", \"in contrast\"]}\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def parse_data(infile, string_input=False) -> list:\n",
-    "    \"\"\"\n",
-    "    This function is to read a gold or a pred file to obtain the label column for accuracy calculation.\n",
-    "\n",
-    "    :param infile: shared task .rels file\n",
-    "    :param string_input: If True, files are replaced by strings with file contents (for import inside other scripts)\n",
-    "    :return: a list of labels\n",
-    "    \"\"\"\n",
-    "\n",
-    "    if not string_input:\n",
-    "        data = io.open(infile, encoding=\"utf-8\").read().strip().replace(\"\\r\", \"\")\n",
-    "    else:\n",
-    "        data = infile.strip()\n",
-    "\n",
-    "    labels = [line.split(\"\\t\")[-1].lower() \n",
-    "              for i, line in enumerate(data.split(\"\\n\")) if \"\\t\" in line and i>0]\n",
-    "    \n",
-    "    sentences = [(line.split(\"\\t\")[3], line.split(\"\\t\")[4], line.split(\"\\t\")[-3])\n",
-    "                 for i, line in enumerate(data.split(\"\\n\")) if \"\\t\" in line and i>0]\n",
-    "    \n",
-    "    return sentences, labels\n",
-    "\n",
-    "\n",
-    "def get_accuracy_score(gold_file, pred_file, string_input=False) -> dict:\n",
-    "\n",
-    "    _, gold_labels = parse_data(gold_file, string_input)\n",
-    "    _, pred_labels = parse_data(pred_file, string_input)\n",
-    "\n",
-    "    filename = gold_file.split(os.sep)[-1]\n",
-    "\n",
-    "    assert len(gold_labels) == len(pred_labels), \"FATAL: different number of labels detected in gold and pred\"\n",
-    "\n",
-    "    acc = accuracy_score(gold_labels, pred_labels)\n",
-    "\n",
-    "    score_dict = {\"filename\": filename,\n",
-    "                  \"acc_score\": round(acc, 4),\n",
-    "                  \"gold_rel_count\": len(gold_labels),\n",
-    "                  \"pred_rel_count\": len(pred_labels)}\n",
-    "\n",
-    "    return score_dict\n",
-    "\n",
-    "def separate_right_wrong(gold_file, pred_file, string_input=False):\n",
-    "    \n",
-    "    rights = []\n",
-    "    wrongs = []\n",
-    "    \n",
-    "    gold_sents, gold_labels = parse_data(gold_file, string_input)\n",
-    "    pred_sents, pred_labels = parse_data(pred_file, string_input)\n",
-    "    \n",
-    "    for n in range(len(gold_sents)):\n",
-    "        if gold_labels[n] == pred_labels[n]:\n",
-    "            rights.append([gold_sents[n], gold_labels[n], pred_labels[n]])\n",
-    "        else:\n",
-    "            wrongs.append([gold_sents[n], gold_labels[n], pred_labels[n]])\n",
-    "    \n",
-    "    return rights, wrongs"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Print accuracies \n",
-    "\n",
-    "model = 'A_15-epochs_frozen-1_2'\n",
-    "corpus = 'eng.dep.covdtb'\n",
-    "\n",
-    "gold_path = '/users/melodi/emetheni/clean_data/'\n",
-    "results_path = 'results/test/' + model + '/'\n",
-    "\n",
-    "corpora = sorted([x[:-4] for x in os.listdir('results/test/' + model) \n",
-    "           if not \"DS\" in x if not 'ipy' in x])\n",
-    "\n",
-    "# for corpus in corpora:\n",
-    "#     score = get_accuracy_score(gold_path + corpus + '/' + corpus + '_test.rels', \n",
-    "#                                results_path + corpus + '.tsv')\n",
-    "\n",
-    "#     print(corpus, '\\t', score['acc_score'])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Separate\n",
-    "\n",
-    "# model = 'A_15-epochs_frozen-1_2'\n",
-    "# corpus = 'eng.dep.covdtb'\n",
-    "\n",
-    "model = 'A_15-epochs_frozen-1-2-3_3'\n",
-    "corpus = 'eng.rst.gum'\n",
-    "\n",
-    "gold_path = '/users/melodi/emetheni/clean_data/'\n",
-    "results_path = 'results/test/' + model + '/'\n",
-    "\n",
-    "corpora = sorted([x[:-4] for x in os.listdir('results/test/' + model) \n",
-    "           if not \"DS\" in x if not 'ipy' in x])\n",
-    "\n",
-    "rights, wrongs = separate_right_wrong(gold_path + corpus + '/' + corpus + '_test.rels', \n",
-    "                           results_path + corpus + '.tsv')\n",
-    "\n",
-    "rights_count = dict(OrderedDict(Counter([x[-1] for x in rights])))\n",
-    "wrongs_count = dict(OrderedDict(Counter([x[-1] for x in wrongs])))\n",
-    "\n",
-    "# for label in sorted(set(list(rights_count.keys()) + list(wrongs_count.keys())), reverse=False):\n",
-    "#     if label in rights_count:\n",
-    "#         r = rights_count[label]\n",
-    "#     else:\n",
-    "#         r = 0\n",
-    "#     if label in wrongs_count:\n",
-    "#         w = wrongs_count[label]\n",
-    "#     else:\n",
-    "#         w = 0\n",
-    "#     print(label, '\\t', r, '\\t', w)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "rights 203 / 1657 0.12251056125528063\n",
-      "wrongs 71 / 918 0.07734204793028322\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Presence of connectives in right/wrong sents\n",
-    "\n",
-    "counter = 0\n",
-    "for sent in rights:\n",
-    "    sentence = (sent[0][0] + ' ' + sent[0][1]).lower()\n",
-    "    if sent[1] in connectives:\n",
-    "        if any(x in sentence for x in connectives[sent[1]]):\n",
-    "#             print(sent)\n",
-    "            counter += 1\n",
-    "print('rights', counter, '/', len(rights), counter/len(rights))\n",
-    "\n",
-    "counter = 0\n",
-    "for sent in wrongs:\n",
-    "    \n",
-    "    sentence = (sent[0][0] + ' ' + sent[0][1]).lower()\n",
-    "    if sent[1] in connectives:\n",
-    "        if any(x in sentence for x in connectives[sent[1]]):\n",
-    "#             print(sent)\n",
-    "            counter += 1\n",
-    "print('wrongs', counter, '/', len(wrongs), counter/len(wrongs))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "rights 1253 / 1657 0.756185878092939\n",
-      "wrongs 735 / 918 0.8006535947712419\n"
-     ]
-    }
-   ],
-   "source": [
-    "# See direction\n",
-    "\n",
-    "counter = 0\n",
-    "for sent in rights:\n",
-    "    if sent[0][2] == '1<2':\n",
-    "        counter += 1\n",
-    "print('rights', counter, '/', len(rights), counter/len(rights))\n",
-    "\n",
-    "counter = 0\n",
-    "for sent in wrongs:\n",
-    "    if sent[0][2] == '1<2':\n",
-    "        counter += 1\n",
-    "print('wrongs', counter, '/', len(wrongs), counter/len(wrongs))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[[('The prevalence of discrimination across racial groups in contemporary America :',\n",
-       "   'The current study seeks to build on this research',\n",
-       "   '1>2'),\n",
-       "  'organization',\n",
-       "  'organization'],\n",
-       " [('The prevalence of discrimination across racial groups in contemporary America :',\n",
-       "   'Results from a nationally representative sample of adults',\n",
-       "   '1<2'),\n",
-       "  'elaboration',\n",
-       "  'elaboration'],\n",
-       " [('Introduction .',\n",
-       "   'The current study seeks to build on this research',\n",
-       "   '1>2'),\n",
-       "  'organization',\n",
-       "  'organization'],\n",
-       " [('Personal experiences of discrimination and bias have been the focus of much social science research .',\n",
-       "   'In many respects , researchers already possess a wealth of knowledge',\n",
-       "   '1>2'),\n",
-       "  'context',\n",
-       "  'context'],\n",
-       " [('Personal experiences of discrimination and bias have been the focus of much social science research .',\n",
-       "   '[ 1 - 3 ]',\n",
-       "   '1<2'),\n",
-       "  'explanation',\n",
-       "  'explanation'],\n",
-       " [('Sociologists have explored the adverse consequences of discrimination',\n",
-       "   '[ 3 – 5 ] ;',\n",
-       "   '1<2'),\n",
-       "  'explanation',\n",
-       "  'explanation'],\n",
-       " [('Sociologists have explored the adverse consequences of discrimination',\n",
-       "   'psychologists have examined the mental processes',\n",
-       "   '1<2'),\n",
-       "  'joint',\n",
-       "  'joint'],\n",
-       " [('psychologists have examined the mental processes',\n",
-       "   'that underpin conscious and unconscious biases',\n",
-       "   '1<2'),\n",
-       "  'elaboration',\n",
-       "  'elaboration'],\n",
-       " [('psychologists have examined the mental processes', '[ 6 ] ;', '1<2'),\n",
-       "  'explanation',\n",
-       "  'explanation'],\n",
-       " [('Sociologists have explored the adverse consequences of discrimination',\n",
-       "   'neuroscientists have examined the neurobiological underpinnings of discrimination',\n",
-       "   '1<2'),\n",
-       "  'joint',\n",
-       "  'joint']]"
-      ]
-     },
-     "execution_count": 15,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "rights[:10]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "subs = {\"ATTRIBUTION\": [\"attribution\", \"attribution-negative\"], \n",
-    "\"BACKGROUND\": [\"background\", \"circumstance\", \"circunstancia\", \"fondo\", \"preparacion´ \", \"preparation\", \"prestatzea\", \"testuingurua\", \"zirkunstantzia\"], \n",
-    "\"CAUSE\": [\"causa\", \"cause\", \"cause-result\", \"consequence\", \"kausa\", \"non-volitional-cause\", \"non-volitional-result\", \"ondorioa\", \"result\", \"resultado\", \"volitional-cause\", \"volitional-result\"], \n",
-    "\"COMPARISON\": [\"analogy\", \"comparison\", \"preference\", \"proportion\"], \n",
-    "\"CONDITION\": [\"alderantzizko-baldintza\", \"alternativa\", \"aukera\", \"baldintza\", \"condicion´ \", \"condicion-inversa ´ \", \"condition\", \"contingency\", \"ez-baldintzatzailea\", \"hypothetical\", \"otherwise\", \"unconditional\", \"unless\"], \n",
-    "\"CONTRAST\": [\"antitesia\", \"antithesis\", \"ant´ıtesis\", \"concesion´ \", \"concession\", \"contrast\", \"contraste\", \"kontrastea\", \"kontzesioa\"], \n",
-    "\"ELABORATION\": [\"definition\", \"e-elaboration\", \"elaboracion\", \"elaboration\", \"elaboration-additional\", \"elaboration-general-specific\", \"elaboration-object-attribute\", \"elaboration-part-whole\", \"elaboration-process-step\", \"elaboration-set-member\", \"elaborazioa\", \"example\", \"parenthetical\"], \n",
-    "\"ENABLEMENT\": [\"ahalbideratzea\", \"capacitacion´ \", \"enablement\", \"helburua\", \"proposito \", \"purpose\"], \n",
-    "\"EVALUATION\": [\"comment\", \"conclusion\", \"ebaluazioa\", \"evaluacion´ \", \"evaluation\", \"interpretacion´ \", \"interpretation\", \"interpretazioa\"], \n",
-    "\"EXPLANATION\": [\"ebidentzia\", \"evidence\", \"evidencia\", \"explanation\", \"explanation-argumentative\", \"justificacion´ \", \"justifikazioa\", \"justify\", \"motibazioa\", \"motivacion´ \", \"motivation\", \"reason\"], \n",
-    "\"JOINT\": [\"bateratzea\", \"conjuncion´ \", \"conjunction\", \"disjunction\", \"disjuntzioa\", \"disyuncion´ \", \"joint\", \"konjuntzioa\", \"list\", \"lista\", \"union´\"], \n",
-    "\"MANNER-MEANS\": [\"manner\", \"means\", \"medio\", \"metodoa\"], \n",
-    "\"SAME-UNIT\": [\"same-unit\"], \n",
-    "\"SUMMARY\": [\"birformulazioa\", \"definitu-gabeko-erlazioa\", \"laburpena\", \"reformulacion´ \", \"restatement\", \"resumen\", \"summary\"], \n",
-    "\"TEMPORAL\": [\"inverted-sequence\", \"secuencia\", \"sekuentzia\", \"sequence\", \"temporal-after\", \"temporal-before\", \"temporal-same-time\"], \n",
-    "\"TEXTUAL-ORGANIZATION\": [\"textual-organization\"], \n",
-    "\"TOPIC-CHANGE\": [\"topic-drift\", \"topic-shift\"], \n",
-    "\"TOPIC-COMMENT\": [\"arazo-soluzioa\", \"comment-topic\", \"problem-solution\", \"question-answer\", \"rhetorical-question\", \"solucion´ ,solutionhood\", \"statement-response\", \"topic-comment\"]}"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "rst = [\"adversative\", \"adversative-antithesis\", \"adversative-concession\", \"adversative-contrast\", \"alternative\", \"antithesis\", \"attribution\", \"attribution-negative\", \"attribution-positive\", \"background\", \"causal\", \"causal-cause\", \"causal-result\", \"cause\", \"cause-effect\", \"circumstance\", \"comparison\", \"concession\", \"conclusion\", \"condition\", \"conjunction\", \"context\", \"context-background\", \"context-circumstance\", \"contingency\", \"contingency-condition\", \"contrast\", \"disjunction\", \"e-elaboration\", \"effect\", \"elaboration\", \"elaboration-additional\", \"elaboration-attribute\", \"enablement\", \"evaluation\", \"evaluation-comment\", \"evaluation-n\", \"evaluation-s\", \"evidence\", \"explanation\", \"explanation-evidence\", \"explanation-justify\", \"explanation-motivation\", \"interpretation\", \"interpretation-evaluation\", \"joint\", \"joint-disjunction\", \"joint-list\", \"joint-other\", \"joint-sequence\", \"justify\", \"list\", \"manner-means\", \"means\", \"mode\", \"mode-manner\", \"mode-means\", \"motivation\", \"nonvolitional-cause\", \"nonvolitional-cause-e\", \"nonvolitional-result\", \"nonvolitional-result-e\", \"organization\", \"organization-heading\", \"organization-phatic\", \"organization-preparation\", \"otherwise\", \"parenthetical\", \"preparation\", \"purpose\", \"purpose-attribute\", \"purpose-goal\", \"reason\", \"restatement\", \"restatement-mn\", \"restatement-partial\", \"restatement-repetition\", \"result\", \"sequence\", \"solutionhood\", \"summary\", \"temporal\", \"textual-organization\", \"topic\", \"topic-change\", \"topic-comment\", \"topic-drift\", \"topic-question\", \"topic-solutionhood\", \"unconditional\", \"unless\", \"volitional-cause\", \"volitional-result\"]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "adversative \t \n",
-      "adversative-antithesis \t \n",
-      "adversative-concession \t \n",
-      "adversative-contrast \t \n",
-      "alternative \t \n",
-      "antithesis \t contrast\n",
-      "attribution \t attribution\n",
-      "attribution-negative \t attribution\n",
-      "attribution-positive \t \n",
-      "background \t background\n",
-      "causal \t \n",
-      "causal-cause \t \n",
-      "causal-result \t \n",
-      "cause \t cause\n",
-      "cause-effect \t \n",
-      "circumstance \t background\n",
-      "comparison \t comparison\n",
-      "concession \t contrast\n",
-      "conclusion \t evaluation\n",
-      "condition \t condition\n",
-      "conjunction \t joint\n",
-      "context \t \n",
-      "context-background \t \n",
-      "context-circumstance \t \n",
-      "contingency \t condition\n",
-      "contingency-condition \t \n",
-      "contrast \t contrast\n",
-      "disjunction \t joint\n",
-      "e-elaboration \t elaboration\n",
-      "effect \t \n",
-      "elaboration \t elaboration\n",
-      "elaboration-additional \t elaboration\n",
-      "elaboration-attribute \t \n",
-      "enablement \t enablement\n",
-      "evaluation \t evaluation\n",
-      "evaluation-comment \t \n",
-      "evaluation-n \t \n",
-      "evaluation-s \t \n",
-      "evidence \t explanation\n",
-      "explanation \t explanation\n",
-      "explanation-evidence \t \n",
-      "explanation-justify \t \n",
-      "explanation-motivation \t \n",
-      "interpretation \t evaluation\n",
-      "interpretation-evaluation \t \n",
-      "joint \t joint\n",
-      "joint-disjunction \t \n",
-      "joint-list \t \n",
-      "joint-other \t \n",
-      "joint-sequence \t \n",
-      "justify \t explanation\n",
-      "list \t joint\n",
-      "manner-means \t \n",
-      "means \t manner-means\n",
-      "mode \t \n",
-      "mode-manner \t \n",
-      "mode-means \t \n",
-      "motivation \t explanation\n",
-      "nonvolitional-cause \t \n",
-      "nonvolitional-cause-e \t \n",
-      "nonvolitional-result \t \n",
-      "nonvolitional-result-e \t \n",
-      "organization \t \n",
-      "organization-heading \t \n",
-      "organization-phatic \t \n",
-      "organization-preparation \t \n",
-      "otherwise \t condition\n",
-      "parenthetical \t elaboration\n",
-      "preparation \t background\n",
-      "purpose \t enablement\n",
-      "purpose-attribute \t \n",
-      "purpose-goal \t \n",
-      "reason \t explanation\n",
-      "restatement \t summary\n",
-      "restatement-mn \t \n",
-      "restatement-partial \t \n",
-      "restatement-repetition \t \n",
-      "result \t cause\n",
-      "sequence \t temporal\n",
-      "solutionhood \t \n",
-      "summary \t summary\n",
-      "temporal \t \n",
-      "textual-organization \t textual-organization\n",
-      "topic \t \n",
-      "topic-change \t \n",
-      "topic-comment \t topic-comment\n",
-      "topic-drift \t topic-change\n",
-      "topic-question \t \n",
-      "topic-solutionhood \t \n",
-      "unconditional \t condition\n",
-      "unless \t condition\n",
-      "volitional-cause \t cause\n",
-      "volitional-result \t cause\n"
-     ]
-    }
-   ],
-   "source": [
-    "for label in rst:\n",
-    "    temp = ''\n",
-    "    for k, v in subs.items():\n",
-    "        if label in v:\n",
-    "            temp = k.lower()\n",
-    "        elif '-' in label:\n",
-    "            for l in label.split('-'):\n",
-    "                if l in v:\n",
-    "                    temp = temp = k.lower()\n",
-    "        elif '.' in label:\n",
-    "            for l in label.split('.'):\n",
-    "                if l in v:\n",
-    "                    temp = temp = k.lower()\n",
-    "        \n",
-    "    print(label, '\\t', temp)\n",
-    "        "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.6.7"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 4
-}
diff --git a/.ipynb_checkpoints/slurm-7061797-checkpoint.out b/.ipynb_checkpoints/slurm-7061797-checkpoint.out
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/.ipynb_checkpoints/slurm-7061801-checkpoint.out b/.ipynb_checkpoints/slurm-7061801-checkpoint.out
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/.ipynb_checkpoints/slurm-7061802-checkpoint.out b/.ipynb_checkpoints/slurm-7061802-checkpoint.out
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/.ipynb_checkpoints/slurm-7076839-checkpoint.out b/.ipynb_checkpoints/slurm-7076839-checkpoint.out
deleted file mode 100644
index e14bd414042799c59299a6401f4d91a063235986..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/slurm-7076839-checkpoint.out
+++ /dev/null
@@ -1,3 +0,0 @@
-Some weights of the model checkpoint at bert-base-multilingual-cased were not used when initializing BertModel: ['cls.seq_relationship.weight', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.decoder.weight', 'cls.seq_relationship.bias', 'cls.predictions.transform.dense.bias', 'cls.predictions.transform.dense.weight', 'cls.predictions.transform.LayerNorm.weight', 'cls.predictions.bias']
-- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).
-- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).
diff --git a/.ipynb_checkpoints/slurm-7076847-checkpoint.out b/.ipynb_checkpoints/slurm-7076847-checkpoint.out
deleted file mode 100644
index 5b8eb3181c0a2c7fcc3a5fcf72b54c8a59ca918f..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/slurm-7076847-checkpoint.out
+++ /dev/null
@@ -1,3 +0,0 @@
-Some weights of the model checkpoint at bert-base-multilingual-cased were not used when initializing BertModel: ['cls.predictions.transform.LayerNorm.weight', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.dense.weight', 'cls.predictions.decoder.weight', 'cls.predictions.transform.dense.bias', 'cls.predictions.bias', 'cls.seq_relationship.bias', 'cls.seq_relationship.weight']
-- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).
-- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).
diff --git a/.ipynb_checkpoints/slurm-7076848-checkpoint.out b/.ipynb_checkpoints/slurm-7076848-checkpoint.out
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/.ipynb_checkpoints/slurm-7076849-checkpoint.out b/.ipynb_checkpoints/slurm-7076849-checkpoint.out
deleted file mode 100644
index a92e529c274205f4ee51561f394706b08961f4ee..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/slurm-7076849-checkpoint.out
+++ /dev/null
@@ -1,8 +0,0 @@
-Create classifier adapter
-
-Name: A_15-F_-1-2-3-4-M_braud
-Model: bert-base-multilingual-cased
-Batch size: 128
-Frozen layers: layer.1, layer.2, layer.3, layer.4
-
  [tqdm progress output trimmed: 175/175 ba in 06:26]
-
  [tqdm progress output trimmed: log cut off at 18/24 ba]
\ No newline at end of file
diff --git a/.ipynb_checkpoints/slurm-7076851-checkpoint.out b/.ipynb_checkpoints/slurm-7076851-checkpoint.out
deleted file mode 100644
index 55259e64e1dbb864293fcc925df7fe234cceecd8..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/slurm-7076851-checkpoint.out
+++ /dev/null
@@ -1,7 +0,0 @@
-Create classifier adapter
-
-Name: A_15-F_-1-M_braud
-Model: bert-base-multilingual-cased
-Batch size: 128
-Frozen layers: layer.1
-
  [tqdm progress output trimmed: log cut off at 7/175 ba]
diff --git a/.ipynb_checkpoints/utils-checkpoint.py b/.ipynb_checkpoints/utils-checkpoint.py
deleted file mode 100644
index cd8220c5c7a4d8407813e978ab74e701734d2d1f..0000000000000000000000000000000000000000
--- a/.ipynb_checkpoints/utils-checkpoint.py
+++ /dev/null
@@ -1,259 +0,0 @@
-#!/usr/bin/env python
-# coding: utf-8
-
-import os
-import torch
-from transformers import AutoConfig, AutoTokenizer
-from configure import parse_args
-import numpy as np
-from sklearn.metrics import accuracy_score
-
-args = parse_args()
-
-
-def open_mappings(mappings_file):
-    
-    ''' Open the mappings file into a dictionary.'''
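-    # The file is tab-separated with a header line; e.g. a line
-    # "adversative\tcontrast\t1" yields mappings["adversative"] = 1.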
-    
-    mappings = {}
-    with open(mappings_file, 'r') as f:
-        next(f)
-        for l in f:
-            mappings[l.split('\t')[0]] = int(l.strip().split('\t')[-1])
-    inv_mappings = {v:k for k, v in mappings.items()}
-
-    return mappings, inv_mappings
-
-
-def open_file(filename, mappings_dict):   
-    
-    ''' Function to open a .rels file. 
-        Arguments: 
-        - filename: the path to a .rels file 
-        - mappings_dict: a dictionary of mappings of unique labels to integers
-        Returns a list of lists, where each list is:
-        the line + [two sentences combined with special BERT token, encoded label]
-    '''
-    
-    max_len = 254 # 512 (max bert len) / 2 (2 sents) -2 (special tokens)
-    lines = []
-    SEP_token = '[SEP]'
-
-    with open(filename, 'r', encoding='utf-8') as f:
-        next(f)
-        for line in f:
-            l = line.strip().split('\t')
-            
-            if len(l) > 1:
-                # chop the sentences to max_len if too long
-                sent_1 = l[3].split(' ')
-                sent_2 = l[4].split(' ')      
-                
-                if len(sent_1) > max_len:
-                    sent_1 = sent_1[:max_len]
-                if len(sent_2) > max_len:
-                    sent_2 = sent_2[:max_len]
-                
-                # flip them if different direction
-                if args.normalize_direction == 'yes':
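-                    # '1>2' keeps unit1 before unit2; otherwise the two units
-                    # are swapped so both directions are presented the same way.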
-                    if l[9] == '1>2':
-                        lines.append(l + [sent_1 + [SEP_token] + sent_2, mappings_dict[l[11].lower()]])
-                    else:
-                        lines.append(l + [sent_2 + [SEP_token] + sent_1, mappings_dict[l[11].lower()]])
-                else:
-                    lines.append(l + [sent_1 + [SEP_token] + sent_2, mappings_dict[l[11].lower()]])
-
-    return lines
-
-
-def encode_batch(batch):
-    
-    """ Encodes a batch of input data using the model tokenizer.
-        Works for a pandas DF column, instead of a list.
-    """
-    tokenizer = AutoTokenizer.from_pretrained(args.transformer_model)
-    return tokenizer(batch["text"], 
-                     max_length=512, 
-                     truncation=True, 
-                     padding="max_length"
-                    )
-
-def open_sentences(path_to_corpora, mappings_dict):
-    ''' Opens all the corpora and the surprise corpora in train/dev/test sets.
-        Uses the open_file() function from utils.
-        Returns:
-        - list of sentences for TRAIN: all the corpora and surprise corpora together
-        - dict of sentences for DEV: each dev set categorized per corpus
-        - dict of sentences for TEST: each test set categorized per corpus
-    '''
-    
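-    # Every sub-folder of the data directory counts as a corpus
-    # (markdown, .DS_Store, utils and notebook files are skipped)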
-    corpora = [folder for folder in os.listdir(path_to_corpora) 
-               if not any(i in folder for i in ['.md', 'DS_', 'utils', 'ipynb'])]
-               
-    # ---------------------
-    train_sentences     = []
-    dev_dict_sentences  = {}
-    test_dict_sentences = {}
-
-    for corpus in corpora:
-        
-        try:
-            # open normal files   
-            train_file = ['/'.join([path_to_corpora, corpus, x])
-                              for x in os.listdir(path_to_corpora + '/' + corpus) 
-                              if 'train' in x and 'rels' in x][0]
-            train_sentences += open_file(train_file, mappings_dict)
-        except: # some corpora do not have a train set
-            pass
-
-        #open each test separately
-        dev_dict_sentences[corpus] = []
-        dev_file = ['/'.join([path_to_corpora,corpus,x])
-                              for x in os.listdir(path_to_corpora + '/' + corpus) 
-                              if 'dev' in x and 'rels' in x][0] 
-        dev_dict_sentences[corpus] += open_file(dev_file, mappings_dict)
-
-        #open each test separately
-        test_dict_sentences[corpus] = []
-        test_file = ['/'.join([path_to_corpora,corpus,x])
-                              for x in os.listdir(path_to_corpora + '/' + corpus) 
-                              if 'test' in x and 'rels' in x][0] 
-        test_dict_sentences[corpus] += open_file(test_file, mappings_dict)
-
-    
-    return train_sentences, dev_dict_sentences, test_dict_sentences
-
-
-# ===============
-# Testing functions
-# ===============
-
-def get_predictions(model,
-                    corpus, 
-                    test_dataloader, 
-                    print_results=True):
-    
-    ''' Function to get the model's predictions for one corpus' test set.
-        Can print accuracy using scikit-learn.
-        Also works with dev sets -- just don't save the outputs.
-        Returns: list of predictions that match test file's lines.
-    '''
-    
-    device = torch.device("cuda" if args.use_cuda else "cpu")
-
-    if args.use_cuda:
-        model = model.cuda()
-    
-    model.eval()
-    test_loss, test_accuracy = 0, 0
-
-    all_labels = []
-    all_preds = []
-    
-    with torch.no_grad():
-        for test_input, test_label in test_dataloader:
-
-            mask = test_input['attention_mask'].to(device)
-            input_id = test_input['input_ids'].squeeze(1).to(device)
-            output = model(input_id, mask)
-
-            label_ids = test_label.to('cpu').numpy()
-
-            all_labels += label_ids.tolist()
-            all_preds += output.argmax(dim=1).tolist()
-
-        assert len(all_labels) == len(all_preds)
-        test_acc = round(accuracy_score(all_labels, all_preds), 4)
-    
-    if print_results:
-        print(corpus, '\tAccuracy:\t', test_acc)
-    
-    return all_preds
-    
-    
-def get_predictions_huggingface(trainer,
-                                corpus, 
-                                test_set, 
-                                print_results=True):
-    
-    ''' SPECIFIC FUNCTION FOR THE HUGGINGFACE TRAINER.
-        Function to get the model's predictions for one corpus' test set.
-        Can print accuracy using scikit-learn.
-        Also works with dev sets -- just don't save the outputs.
-        Returns: list of predictions that match test file's lines.
-    '''
-
-    results = trainer.predict(test_set)
-    preds = np.argmax(results.predictions, axis=1)
-    results = results.label_ids
-    test_acc = round(accuracy_score(preds, results), 4)
-    
-    if print_results:
-        print(corpus, '\tAccuracy:\t', test_acc, '\n')
-    
-    return preds
-    
-    
-def print_results_to_file(corpus, 
-                          test_sentences, 
-                          test_results, 
-                          inv_mappings_dict, 
-                          #substitutions_file, 
-                          output_folder):
-    
-    ''' Function to print a new file with the test predictions per 
-        the specifications of the Shared task.
-        Returns: one file per corpus with predictions.
-    '''
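-    # Each output line copies the fields of the input sentence and appends
-    # the predicted (string) label as the last column.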
-#     output_folder = 'results'
-    header = '\t'.join(['doc',
-                         'unit1_toks',
-                         'unit2_toks',
-                         'unit1_txt',
-                         'unit2_txt',
-                         's1_toks',
-                         's2_toks',
-                         'unit1_sent',
-                         'unit2_sent',
-                         'dir',
-                         'orig_label',
-                         'label', 
-                         'predicted_label'])
-    
-#     # create a dict of all the substitutions that were made
-#     revert_substitutions = {}
-#     with open(substitutions_file, 'r', encoding='utf-8') as f:
-#         next(f)
-#         for line in f:
-#             l = line.strip().split('\t')
-#             if not l[1] in revert_substitutions:
-#                 revert_substitutions[l[1]] = {}
-#             revert_substitutions[l[1]][l[2]] = l[0]
-    
-    # save the results in a separate folder, one file per corpus
-    if not os.path.exists(output_folder):
-        os.makedirs(output_folder)
-    
-    results_to_write = []
-    
-    for n, sent in enumerate(test_sentences):
-        label = test_results[n]
-        label = inv_mappings_dict[label]
-#         try:
-#             if corpus in revert_substitutions:
-#                 if label in revert_substitutions[corpus]:
-#                     label = revert_substitutions[corpus][label]
-#         except:
-#             pass
-        temp  = sent[:] + [label]
-        results_to_write.append(temp)
-        
-    assert len(results_to_write) == len(test_sentences)
-    
-    with open(output_folder + '/' + corpus + '.tsv', 'a+', encoding='utf-8') as f:
-        f.write(header + '\n')
-        for line in results_to_write:
-            f.write('\t'.join([str(x) for x in line]))
-            f.write('\n')
\ No newline at end of file
diff --git a/mappings/.ipynb_checkpoints/mappings-classes-braud-checkpoint.tsv b/mappings/.ipynb_checkpoints/mappings-classes-braud-checkpoint.tsv
deleted file mode 100644
index 5f266baef15da72dd587e0d88cf082256c03928b..0000000000000000000000000000000000000000
--- a/mappings/.ipynb_checkpoints/mappings-classes-braud-checkpoint.tsv
+++ /dev/null
@@ -1,162 +0,0 @@
-LABEL	CLASS	MAPPING
-adversative	contrast	1
-adversative-antithesis	contrast	1
-adversative-concession	contrast	1
-adversative-contrast	contrast	1
-alternative	condition	10
-antithesis	contrast	1
-attribution	attribution	2
-attribution-negative	attribution	2
-attribution-positive	attribution	2
-background	background	8
-causal	cause	6
-causal-cause	cause	6
-causal-result	cause	6
-cause	cause	6
-cause-effect	cause	6
-circumstance	background	8
-comparison	comparison	12
-concession	contrast	1
-conclusion	evaluation	13
-condition	condition	10
-conjunction	joint	16
-context	explanation	15
-context-background	background	8
-context-circumstance	background	8
-contingency	condition	10
-contingency-condition	condition	10
-contrast	contrast	1
-disjunction	same-unit	11
-e-elaboration	elaboration	3
-effect	cause	6
-elaboration	elaboration	3
-elaboration-additional	elaboration	3
-elaboration-attribute	elaboration	3
-enablement	enablement	14
-evaluation	evaluation	13
-evaluation-comment	evaluation	13
-evaluation-n	evaluation	13
-evaluation-s	evaluation	13
-evidence	explanation	15
-explanation	explanation	15
-explanation-evidence	explanation	15
-explanation-justify	explanation	15
-explanation-motivation	explanation	15
-interpretation	evaluation	13
-interpretation-evaluation	evaluation	13
-joint	joint	16
-joint-disjunction	joint	16
-joint-list	joint	16
-joint-other	joint	16
-joint-sequence	temporal	17
-justify	explanation	15
-list	joint	16
-manner-means	manner-means	4
-means	manner-means	4
-mode	manner-means	4
-mode-manner	manner-means	4
-mode-means	manner-means	4
-motivation	explanation	15
-nonvolitional-cause	cause	6
-nonvolitional-cause-e	cause	6
-nonvolitional-result	cause	6
-nonvolitional-result-e	cause	6
-organization	textual-organization	0
-organization-heading	textual-organization	0
-organization-phatic	textual-organization	0
-organization-preparation	textual-organization	0
-otherwise	condition	10
-parenthetical	same-unit	11
-preparation	background	8
-purpose	enablement	14
-purpose-attribute	enablement	14
-purpose-goal	enablement	14
-reason	explanation	15
-restatement	summary	5
-restatement-mn	summary	5
-restatement-partial	summary	5
-restatement-repetition	summary	5
-result	cause	6
-sequence	temporal	17
-solutionhood	topic-comment	7
-summary	summary	5
-temporal	temporal	17
-textual-organization	textual-organization	0
-topic	topic-comment	7
-topic-change	topic-change	9
-topic-comment	topic-comment	7
-topic-drift	topic-change	9
-topic-question	topic-comment	7
-topic-solutionhood	topic-comment	7
-unconditional	condition	10
-unless	condition	10
-volitional-cause	cause	6
-volitional-result	cause	6
-causation	cause	6
-comparison.concession	contrast	1
-comparison.concession+speechact	comparison	12
-comparison.contrast	contrast	1
-comparison.degree	comparison	12
-comparison.similarity	comparison	12
-conditional	condition	10
-contingency.cause	condition	10
-contingency.cause+belief	condition	10
-contingency.cause+speechact	condition	10
-contingency.condition	condition	10
-contingency.condition+speechact	condition	10
-contingency.goal	condition	10
-contingency.negative-cause	cause	6
-contingency.negative-condition	condition	10
-contingency.purpose	enablement	14
-expansion	elaboration	3
-expansion.alternative	condition	10
-expansion.conjunction	joint	16
-expansion.correction	contrast	1
-expansion.disjunction	cause	6
-expansion.equivalence	comparison	12
-expansion.exception	contrast	1
-expansion.instantiation	elaboration	3
-expansion.level-of-detail	elaboration	3
-expansion.manner	manner-means	4
-expansion.restatement	summary	5
-expansion.substitution	contrast	1
-hypophora	topic-comment	7
-interrupted	topic-change	9
-progression	temporal	17
-repetition	elaboration	3
-temporal.asynchronous	temporal	17
-temporal.synchronous	temporal	17
-temporal.synchrony	temporal	17
-qap	topic-comment	7
-contingency.negative-condition+speechact	condition	10
-contingency.negative	condition	10
-expansion.genexpansion	elaboration	3
-expansion.level	elaboration	3
-qap.hypophora	topic-comment	7
-bg-compare	background	8
-bg-general	background	8
-bg-goal	background	8
-cause-result	cause	6
-elab-addition	elaboration	3
-elab-aspect	elaboration	3
-elab-definition	elaboration	3
-elab-enumember	elaboration	3
-elab-example	elaboration	3
-elab-process_step	elaboration	3
-exp-evidence	explanation	15
-exp-reason	explanation	15
-findings	cause	6
-acknowledgement	attribution	2
-alternation	condition	10
-clarification_question	topic-comment	7
-comment	evaluation	13
-continuation	joint	16
-correction	contrast	1
-explanation*	explanation	15
-flashback	explanation	15
-frame	explanation	15
-goal	enablement	14
-narration	elaboration	3
-parallel	joint	16
-q_elab	elaboration	3
-question_answer_pair	topic-comment	7
-temploc	temporal	17
diff --git a/mappings/.ipynb_checkpoints/mappings_substitutions-checkpoint.tsv b/mappings/.ipynb_checkpoints/mappings_substitutions-checkpoint.tsv
deleted file mode 100644
index 52b18b508dc6caa76872254d3c80698e8497399a..0000000000000000000000000000000000000000
--- a/mappings/.ipynb_checkpoints/mappings_substitutions-checkpoint.tsv
+++ /dev/null
@@ -1,163 +0,0 @@
-mode-means	0
-expansion.restatement	1
-expansion.substitution	2
-bg-compare	3
-root	4
-organization-preparation	5
-topic-solutionhood	6
-evaluation-n	7
-contingency.negative-cause	8
-organization	9
-causal	10
-elab-enumember	11
-organization-phatic	12
-purpose-attribute	13
-mode	14
-temporal	15
-contingency.cause+belief	16
-means	17
-expansion	18
-comparison.concession+speechact	19
-parallel	20
-contingency.condition	21
-context-circumstance	22
-restatement-partial	23
-expansion.equivalence	24
-interrupted	25
-contingency.negative-condition	26
-comment	27
-organization-heading	28
-joint-other	29
-result	30
-expansion.alternative	31
-parenthetical	32
-clarification_question	33
-background	34
-conjunction	77
-nonvolitional-result-e	36
-manner-means	37
-elaboration-additional	38
-attribution	39
-volitional-result	40
-contingency.negative	41
-mode-manner	42
-expansion.level-of-detail	43
-topic-comment	44
-joint-sequence	45
-elab-addition	46
-explanation*	47
-comparison.similarity	48
-reason	49
-solutionhood	50
-nonvolitional-cause	51
-contingency.negative-condition+speechact	52
-topic-question	53
-elab-definition	54
-hypophora	55
-adversative	56
-elaboration-attribute	57
-nonvolitional-result	58
-joint	59
-bg-goal	60
-contrast	61
-explanation-justify	62
-context-background	63
-topic-drift	64
-contingency.purpose	65
-explanation	66
-elaboration	67
-elab-example	68
-evaluation-comment	69
-continuation	70
-exp-reason	71
-interpretation	72
-conclusion	73
-attribution-negative	74
-flashback	75
-frame	76
-expansion.conjunction	77
-preparation	78
-temporal.asynchronous	79
-attribution-positive	80
-acknowledgement	81
-comparison.contrast	82
-condition	83
-contingency.goal	84
-restatement-repetition	85
-temploc	86
-adversative-contrast	87
-topic-change	88
-context	89
-effect	90
-expansion.correction	91
-contingency.cause	92
-progression	93
-evaluation-s	94
-explanation-evidence	95
-volitional-cause	96
-concession	97
-expansion.exception	98
-summary	99
-comparison.degree	100
-adversative-concession	101
-comparison	102
-topic	103
-expansion.instantiation	104
-purpose-goal	105
-evaluation	106
-expansion.disjunction	107
-explanation-motivation	108
-nonvolitional-cause-e	109
-question_answer_pair	110
-restatement-mn	111
-contingency.cause+speechact	112
-cause-effect	113
-purpose	114
-enablement	115
-cause	116
-e-elaboration	117
-contingency.condition+speechact	118
-interpretation-evaluation	119
-adversative-antithesis	120
-antithesis	121
-expansion.manner	122
-comparison.concession	123
-narration	124
-contingency-condition	125
-contingency	126
-temporal.synchronous	127
-circumstance	128
-q_elab	129
-causal-cause	130
-joint-list	131
-elab-aspect	132
-elab-process_step	133
-causal-result	134
-alternation	31
-conditional	83
-goal	105
-correction	91
-alternative	31
-disjunction	107
-evidence	95
-justify	62
-list	131
-motivation	108
-restatement	1
-sequence	45
-unless	61
-causation	116
-bg-general	34
-exp-evidence	95
-otherwise	56
-unconditional	107
-joint-disjunction	107
-repetition	85
-temporal.synchrony	127
-textual-organization	9
-cause-result	113
-findings	30
-qap	110
-expansion.level	43
-qap.hypophora	55
-expansion.genexpansion	18
diff --git a/mappings/.ipynb_checkpoints/specific_results-checkpoint.txt b/mappings/.ipynb_checkpoints/specific_results-checkpoint.txt
deleted file mode 100644
index 46f7a3786a570956d9e5a8b37204a80d501b10e4..0000000000000000000000000000000000000000
--- a/mappings/.ipynb_checkpoints/specific_results-checkpoint.txt
+++ /dev/null
@@ -1,27 +0,0 @@
-BEST	EPOCH	Corpus
-B	3	deu.rst.pcc
-A1_3	4	eng.dep.covdtb
-B	4	eng.dep.scidtb
-B	4	eng.pdtb.pdtb
-A1_3	4	eng.pdtb.tedm
-A1_3	4	eng.rst.gum
-B	5	eng.rst.rstdt
-A1_3	4	eng.sdrt.stac
-A1_3	4	eus.rst.ert
-B	3	fas.rst.prstc
-A1_3	4	fra.sdrt.annodis
-A1_3	4	ita.pdtb.luna
-A1_3	4	nld.rst.nldt
-A1	3	por.pdtb.crpc
-A1_3	4	por.pdtb.tedm
-A1_3	4	por.rst.cstn
-A1_3	4	rus.rst.rrt
-A1_3	4	spa.rst.rststb
-B	5	spa.rst.sctb
-A1_3	4	tha.pdtb.tdtb
-B	3	tur.pdtb.tdb
-A1_3	4	tur.pdtb.tedm
-A1	3	zho.dep.scidtb
-B	4	zho.pdtb.cdtb
-A1	3	zho.rst.gcdt
-A1_3	4	zho.rst.sctb
\ No newline at end of file
diff --git a/mappings/.ipynb_checkpoints/substitions-classes-braud-checkpoint.tsv b/mappings/.ipynb_checkpoints/substitions-classes-braud-checkpoint.tsv
deleted file mode 100644
index ae4b1fd5d5249f2af6313cbf0b3fca01c4d287a0..0000000000000000000000000000000000000000
--- a/mappings/.ipynb_checkpoints/substitions-classes-braud-checkpoint.tsv
+++ /dev/null
@@ -1,163 +0,0 @@
-LABEL	CLASS
-adversative	contrast
-adversative-antithesis	contrast
-adversative-concession	contrast
-adversative-contrast	contrast
-alternative	condition
-antithesis	contrast
-attribution	attribution
-attribution-negative	attribution
-attribution-positive	attribution
-background	background
-causal	cause
-causal-cause	cause
-causal-result	cause
-cause	cause
-cause-effect	cause
-circumstance	background
-comparison	comparison
-concession	contrast
-conclusion	evaluation
-condition	condition
-conjunction	joint
-context	explanation
-context-background	background
-context-circumstance	background
-contingency	condition
-contingency-condition	condition
-contrast	contrast
-disjunction	same-unit
-e-elaboration	elaboration
-effect	cause
-elaboration	elaboration
-elaboration-additional	elaboration
-elaboration-attribute	elaboration
-enablement	enablement
-evaluation	evaluation
-evaluation-comment	evaluation
-evaluation-n	evaluation
-evaluation-s	evaluation
-evidence	explanation
-explanation	explanation
-explanation-evidence	explanation
-explanation-justify	explanation
-explanation-motivation	explanation
-interpretation	evaluation
-interpretation-evaluation	evaluation
-joint	joint
-joint-disjunction	joint
-joint-list	joint
-joint-other	joint
-joint-sequence	temporal
-justify	explanation
-list	joint
-manner-means	manner-means
-means	manner-means
-mode	manner-means
-mode-manner	manner-means
-mode-means	manner-means
-motivation	explanation
-nonvolitional-cause	cause
-nonvolitional-cause-e	cause
-nonvolitional-result	cause
-nonvolitional-result-e	cause
-organization	textual-organization
-organization-heading	textual-organization
-organization-phatic	textual-organization
-organization-preparation	textual-organization
-otherwise	condition
-parenthetical	same-unit
-preparation	background
-purpose	enablement
-purpose-attribute	enablement
-purpose-goal	enablement
-reason	explanation
-restatement	summary
-restatement-mn	summary
-restatement-partial	summary
-restatement-repetition	summary
-result	cause
-sequence	temporal
-solutionhood	topic-comment
-summary	summary
-temporal	temporal
-textual-organization	textual-organization
-topic	topic-comment
-topic-change	topic-change
-topic-comment	topic-comment
-topic-drift	topic-change
-topic-question	topic-comment
-topic-solutionhood	topic-comment
-unconditional	condition
-unless	condition
-volitional-cause	cause
-volitional-result	cause
-causation	cause
-comparison.concession	contrast
-comparison.concession+speechact	comparison
-comparison.contrast	contrast
-comparison.degree	comparison
-comparison.similarity	comparison
-conditional	condition
-contingency.cause	condition
-contingency.cause+belief	condition
-contingency.cause+speechact	condition
-contingency.condition	condition
-contingency.condition+speechact	condition
-contingency.goal	condition
-contingency.negative-cause	cause
-contingency.negative-condition	condition
-contingency.purpose	enablement
-expansion	elaboration
-expansion.alternative	condition
-expansion.conjunction	joint
-expansion.correction	contrast
-expansion.disjunction	cause
-expansion.equivalence	comparison
-expansion.exception	contrast
-expansion.instantiation	elaboration
-expansion.level-of-detail	elaboration
-expansion.manner	manner-means
-expansion.restatement	summary
-expansion.substitution	contrast
-hypophora	topic-comment
-interrupted	topic-change
-progression	temporal
-repetition	elaboration
-temporal.asynchronous	temporal
-temporal.synchronous	temporal
-temporal.synchrony	temporal
-qap	topic-comment
-contingency.negative-condition+speechact	condition
-contingency.negative	condition
-expansion.genexpansion	elaboration
-expansion.level	elaboration
-qap.hypophora	topic-comment
-bg-compare	background
-bg-general	background
-bg-goal	background
-cause-result	cause
-elab-addition	elaboration
-elab-aspect	elaboration
-elab-definition	elaboration
-elab-enumember	elaboration
-elab-example	elaboration
-elab-process_step	elaboration
-exp-evidence	explanation
-exp-reason	explanation
-findings	cause
-acknowledgement	attribution
-alternation	condition
-clarification_question	topic-comment
-comment	evaluation
-continuation	joint
-correction	contrast
-explanation*	explanation
-flashback	explanation
-frame	explanation
-goal	enablement
-narration	elaboration
-parallel	joint
-q_elab	elaboration
-question_answer_pair	topic-comment
-temploc	temporal
diff --git a/mappings/.ipynb_checkpoints/substitions-classes-rst-checkpoint.tsv b/mappings/.ipynb_checkpoints/substitions-classes-rst-checkpoint.tsv
deleted file mode 100644
index 64485b9c0d9fac1701d0d614846210762a872f74..0000000000000000000000000000000000000000
--- a/mappings/.ipynb_checkpoints/substitions-classes-rst-checkpoint.tsv
+++ /dev/null
@@ -1,163 +0,0 @@
-LABEL	CLASS
-adversative	contrast
-adversative-antithesis	contrast
-adversative-concession	contrast
-adversative-contrast	contrast
-alternative	condition
-antithesis	contrast
-attribution	attribution
-attribution-negative	attribution
-attribution-positive	attribution
-background	background
-causal	cause
-causal-cause	cause
-causal-result	cause
-cause	cause
-cause-effect	cause
-circumstance	background
-comparison	comparison
-concession	contrast
-conclusion	evaluation
-condition	condition
-conjunction	joint
-context	background
-context-background	background
-context-circumstance	background
-contingency	condition
-contingency-condition	condition
-contrast	contrast
-disjunction	joint
-e-elaboration	elaboration
-effect	cause
-elaboration	elaboration
-elaboration-additional	elaboration
-elaboration-attribute	elaboration
-enablement	enablement
-evaluation	evaluation
-evaluation-comment	evaluation
-evaluation-n	evaluation
-evaluation-s	evaluation
-evidence	explanation
-explanation	explanation
-explanation-evidence	explanation
-explanation-justify	explanation
-explanation-motivation	explanation
-interpretation	evaluation
-interpretation-evaluation	evaluation
-joint	joint
-joint-disjunction	joint
-joint-list	joint
-joint-other	joint
-joint-sequence	temporal
-justify	explanation
-list	joint
-manner-means	manner-means
-means	manner-means
-mode	manner-means
-mode-manner	manner-means
-mode-means	manner-means
-motivation	explanation
-nonvolitional-cause	cause
-nonvolitional-cause-e	cause
-nonvolitional-result	cause
-nonvolitional-result-e	cause
-organization	background
-organization-heading	background
-organization-phatic	background
-organization-preparation	background
-otherwise	condition
-parenthetical	elaboration
-preparation	background
-purpose	enablement
-purpose-attribute	enablement
-purpose-goal	enablement
-reason	explanation
-restatement	summary
-restatement-mn	summary
-restatement-partial	summary
-restatement-repetition	summary
-result	cause
-sequence	temporal
-solutionhood	topic-comment
-summary	summary
-temporal	temporal
-textual-organization	background
-topic	topic-comment
-topic-change	topic-change
-topic-comment	topic-comment
-topic-drift	topic-change
-topic-question	topic-comment
-topic-solutionhood	topic-comment
-unconditional	condition
-unless	condition
-volitional-cause	cause
-volitional-result	cause
-causation	cause
-comparison.concession	contrast
-comparison.concession+speechact	comparison
-comparison.contrast	contrast
-comparison.degree	comparison
-comparison.similarity	comparison
-conditional	condition
-contingency.cause	condition
-contingency.cause+belief	condition
-contingency.cause+speechact	condition
-contingency.condition	condition
-contingency.condition+speechact	condition
-contingency.goal	condition
-contingency.negative-cause	cause
-contingency.negative-condition	condition
-contingency.purpose	enablement
-expansion	elaboration
-expansion.alternative	condition
-expansion.conjunction	joint
-expansion.correction	contrast
-expansion.disjunction	joint
-expansion.equivalence	comparison
-expansion.exception	contrast
-expansion.instantiation	elaboration
-expansion.level-of-detail	elaboration
-expansion.manner	manner-means
-expansion.restatement	summary
-expansion.substitution	contrast
-hypophora	topic-comment
-interrupted	topic-change
-progression	temporal
-repetition	elaboration
-temporal.asynchronous	temporal
-temporal.synchronous	temporal
-temporal.synchrony	temporal
-qap	topic-comment
-contingency.negative-condition+speechact	condition
-contingency.negative	condition
-expansion.genexpansion	elaboration
-expansion.level	elaboration
-qap.hypophora	topic-comment
-bg-compare	background
-bg-general	background
-bg-goal	background
-cause-result	cause
-elab-addition	elaboration
-elab-aspect	elaboration
-elab-definition	elaboration
-elab-enumember	elaboration
-elab-example	elaboration
-elab-process_step	elaboration
-exp-evidence	explanation
-exp-reason	explanation
-findings	cause
-acknowledgement	attribution
-alternation	condition
-clarification_question	topic-comment
-comment	evaluation
-continuation	joint
-correction	contrast
-explanation*	explanation
-flashback	explanation
-frame	explanation
-goal	enablement
-narration	elaboration
-parallel	joint
-q_elab	elaboration
-question_answer_pair	topic-comment
-temploc	temporal
diff --git a/mappings/.ipynb_checkpoints/substitutions-checkpoint.txt b/mappings/.ipynb_checkpoints/substitutions-checkpoint.txt
deleted file mode 100644
index 4fbb7f93846b3599ade1a59263d318e325522763..0000000000000000000000000000000000000000
--- a/mappings/.ipynb_checkpoints/substitutions-checkpoint.txt
+++ /dev/null
@@ -1,751 +0,0 @@
-ORIGINAL-LABEL	CORPUS	SUBSTITUTION
-disjunction	deu.rst.pcc	expansion.disjunction
-evidence	deu.rst.pcc	explanation-evidence
-list	deu.rst.pcc	joint-list
-restatement	deu.rst.pcc	expansion.restatement
-sequence	deu.rst.pcc	joint-sequence
-condition	deu.rst.pcc	conditional
-contrast	deu.rst.pcc	unless
-conjunction	deu.rst.pcc	expansion.conjunction
-background	deu.rst.pcc	bg-general
-cause	deu.rst.pcc	causation
-ATTRIBUTION	eng.dep.covdtb	attribution
-BACKGROUND	eng.dep.covdtb	background
-CAUSE-RESULT	eng.dep.covdtb	cause-effect
-COMPARISON	eng.dep.covdtb	comparison
-CONDITION	eng.dep.covdtb	condition
-ELABORATION	eng.dep.covdtb	elaboration
-ENABLEMENT	eng.dep.covdtb	enablement
-FINDINGS	eng.dep.covdtb	result
-JOINT	eng.dep.covdtb	joint
-MANNER-MEANS	eng.dep.covdtb	manner-means
-TEMPORAL	eng.dep.covdtb	temporal
-TEXTUAL-ORGANIZATION	eng.dep.covdtb	organization
-TEMPORAL	eng.dep.covdtb	temporal.synchrony
-ELABORATION	eng.dep.covdtb	context
-JOINT	eng.dep.covdtb	expansion.conjunction
-CAUSE-RESULT	eng.dep.covdtb	causation
-ELABORATION	eng.dep.covdtb	elab-aspect
-MANNER-MEANS	eng.dep.covdtb	mode
-ENABLEMENT	eng.dep.covdtb	contingency.purpose
-JOINT	eng.dep.covdtb	joint-disjunction
-CAUSE-RESULT	eng.dep.covdtb	contingency.cause
-CONDITION	eng.dep.covdtb	conditional
-TEXTUAL-ORGANIZATION	eng.dep.covdtb	textual-organization
-COMPARISON	eng.dep.covdtb	unless
-COMPARISON	eng.dep.covdtb	comparison.contrast
-CAUSE-RESULT	eng.dep.covdtb	causal
-FINDINGS	eng.dep.covdtb	evaluation
-ELABORATION	eng.dep.covdtb	explanation
-ELABORATION	eng.dep.covdtb	expansion.level
-ELABORATION	eng.dep.covdtb	expansion.instantiation
-ELABORATION	eng.dep.covdtb	elab-addition
-COMPARISON	eng.dep.covdtb	otherwise
-COMPARISON	eng.dep.covdtb	comparison.concession
-TEMPORAL	eng.dep.covdtb	temporal.asynchronous
-ELABORATION	eng.dep.covdtb	restatement
-BACKGROUND	eng.dep.covdtb	bg-general
-MANNER-MEANS	eng.dep.covdtb	expansion.manner
-CONDITION	eng.dep.covdtb	contingency.condition
-ENABLEMENT	eng.dep.covdtb	purpose
-bg-general	eng.dep.scidtb	background
-exp-evidence	eng.dep.scidtb	explanation-evidence
-temporal	eng.dep.scidtb	context
-joint	eng.dep.scidtb	expansion.conjunction
-elab-addition	eng.dep.scidtb	contingency
-cause	eng.dep.scidtb	causation
-manner-means	eng.dep.scidtb	mode
-cause	eng.dep.scidtb	contingency.cause
-condition	eng.dep.scidtb	conditional
-contrast	eng.dep.scidtb	unless
-contrast	eng.dep.scidtb	comparison.contrast
-elab-addition	eng.dep.scidtb	elaboration
-elab-addition	eng.dep.scidtb	explanation
-joint	eng.dep.scidtb	expansion.level
-contrast	eng.dep.scidtb	otherwise
-temporal	eng.dep.scidtb	temporal.asynchronous
-temporal	eng.dep.scidtb	restatement
-enablement	eng.dep.scidtb	purpose
-Comparison.Concession	eng.pdtb.pdtb	comparison.concession
-Comparison.Concession+SpeechAct	eng.pdtb.pdtb	comparison.concession+speechact
-Comparison.Contrast	eng.pdtb.pdtb	comparison.contrast
-Comparison.Similarity	eng.pdtb.pdtb	comparison.similarity
-Contingency.Cause	eng.pdtb.pdtb	contingency.cause
-Contingency.Cause+Belief	eng.pdtb.pdtb	contingency.cause+belief
-Contingency.Cause+SpeechAct	eng.pdtb.pdtb	contingency.cause+speechact
-Contingency.Condition	eng.pdtb.pdtb	contingency.condition
-Contingency.Condition+SpeechAct	eng.pdtb.pdtb	contingency.condition+speechact
-Contingency.Negative-cause	eng.pdtb.pdtb	contingency.negative-cause
-Contingency.Negative-condition	eng.pdtb.pdtb	contingency.negative-condition
-Contingency.Purpose	eng.pdtb.pdtb	contingency.purpose
-Expansion.Conjunction	eng.pdtb.pdtb	expansion.conjunction
-Expansion.Disjunction	eng.pdtb.pdtb	expansion.disjunction
-Expansion.Equivalence	eng.pdtb.pdtb	expansion.equivalence
-Expansion.Exception	eng.pdtb.pdtb	expansion.exception
-Expansion.Instantiation	eng.pdtb.pdtb	expansion.instantiation
-Expansion.Level-of-detail	eng.pdtb.pdtb	expansion.level-of-detail
-Expansion.Manner	eng.pdtb.pdtb	expansion.manner
-Expansion.Substitution	eng.pdtb.pdtb	expansion.substitution
-Hypophora	eng.pdtb.pdtb	hypophora
-Temporal.Asynchronous	eng.pdtb.pdtb	temporal.asynchronous
-Temporal.Synchronous	eng.pdtb.pdtb	temporal.synchronous
-Acknowledgement	eng.pdtb.pdtb	acknowledgement
-Temporal.Synchronous	eng.pdtb.pdtb	temporal.synchrony
-Hypophora	eng.pdtb.pdtb	qap.hypophora
-Temporal.Synchronous	eng.pdtb.pdtb	context
-Contingency.Condition	eng.pdtb.pdtb	contingency
-Contingency.Purpose	eng.pdtb.pdtb	qap
-Contingency.Negative-condition	eng.pdtb.pdtb	mode
-Contingency.Condition	eng.pdtb.pdtb	enablement
-expansion.disjunction	eng.pdtb.pdtb	joint-disjunction
-Contingency.Condition	eng.pdtb.pdtb	conditional
-Hypophora	eng.pdtb.pdtb	q_elab
-Expansion.Conjunction	eng.pdtb.pdtb	joint
-Expansion.Conjunction	eng.pdtb.pdtb	elaboration
-Contingency.Cause	eng.pdtb.pdtb	causal
-Contingency.Condition	eng.pdtb.pdtb	comparison
-Contingency.Cause	eng.pdtb.pdtb	explanation
-expansion.level-of-detail	eng.pdtb.pdtb	expansion.level
-adversative	eng.pdtb.pdtb	otherwise
-Expansion.Conjunction	eng.pdtb.pdtb	continuation
-Contingency.Purpose	eng.pdtb.pdtb	purpose
-Comparison.Concession	eng.pdtb.tedm	comparison.concession
-Comparison.Contrast	eng.pdtb.tedm	comparison.contrast
-Comparison.Similarity	eng.pdtb.tedm	comparison.similarity
-Contingency.Cause	eng.pdtb.tedm	contingency.cause
-Contingency.Cause+Belief	eng.pdtb.tedm	contingency.cause+belief
-Contingency.Cause+SpeechAct	eng.pdtb.tedm	contingency.cause+speechact
-Contingency.Condition	eng.pdtb.tedm	contingency.condition
-Contingency.Purpose	eng.pdtb.tedm	contingency.purpose
-Expansion.Conjunction	eng.pdtb.tedm	expansion.conjunction
-Expansion.Disjunction	eng.pdtb.tedm	expansion.disjunction
-Expansion.Equivalence	eng.pdtb.tedm	expansion.equivalence
-Expansion.Instantiation	eng.pdtb.tedm	expansion.instantiation
-Expansion.Level-of-detail	eng.pdtb.tedm	expansion.level-of-detail
-Expansion.Manner	eng.pdtb.tedm	expansion.manner
-Expansion.Substitution	eng.pdtb.tedm	expansion.substitution
-Hypophora	eng.pdtb.tedm	hypophora
-Temporal.Asynchronous	eng.pdtb.tedm	temporal.asynchronous
-Temporal.Synchronous	eng.pdtb.tedm	temporal.synchronous
-Temporal.Synchronous	eng.pdtb.tedm	temporal.synchrony
-Temporal.Synchronous	eng.pdtb.tedm	context
-Contingency.Condition	eng.pdtb.tedm	contingency
-Expansion.Manner	eng.pdtb.tedm	mode
-Expansion.Manner	eng.pdtb.tedm	enablement
-Expansion.Disjunction	eng.pdtb.tedm	joint-disjunction
-Contingency.Condition	eng.pdtb.tedm	conditional
-Expansion.Conjunction	eng.pdtb.tedm	joint
-Comparison.Contrast	eng.pdtb.tedm	unless
-Contingency.Cause	eng.pdtb.tedm	textual-organization
-Expansion.Level-of-detail	eng.pdtb.tedm	elaboration
-Contingency.Cause	eng.pdtb.tedm	causal
-Contingency.Cause	eng.pdtb.tedm	explanation
-Expansion.Level-of-detail	eng.pdtb.tedm	expansion.level
-Expansion.Manner	eng.pdtb.tedm	manner-means
-Comparison.Contrast	eng.pdtb.tedm	otherwise
-Contingency.Purpose	eng.pdtb.tedm	purpose
-restatement	eng.rst.gum	expansion.restatement
-elaboration	eng.rst.gum	acknowledgement
-context	eng.rst.gum	temporal.synchrony
-topic	eng.rst.gum	qap.hypophora
-joint	eng.rst.gum	expansion.conjunction
-causal	eng.rst.gum	causation
-topic	eng.rst.gum	qap
-purpose	eng.rst.gum	enablement
-explanation	eng.rst.gum	expansion.substitution
-purpose	eng.rst.gum	contingency.purpose
-causal	eng.rst.gum	contingency.cause
-contingency	eng.rst.gum	conditional
-elaboration	eng.rst.gum	q_elab
-organization	eng.rst.gum	textual-organization
-adversative	eng.rst.gum	unless
-adversative	eng.rst.gum	comparison.contrast
-topic	eng.rst.gum	clarification_question
-adversative	eng.rst.gum	comparison
-elaboration	eng.rst.gum	expansion.level
-mode	eng.rst.gum	manner-means
-context	eng.rst.gum	expansion.instantiation
-elaboration	eng.rst.gum	elab-addition
-adversative	eng.rst.gum	otherwise
-adversative	eng.rst.gum	comparison.concession
-joint	eng.rst.gum	comparison.similarity
-context	eng.rst.gum	temporal.asynchronous
-context	eng.rst.gum	temporal
-explanation	eng.rst.gum	continuation
-explanation	eng.rst.gum	bg-general
-mode	eng.rst.gum	expansion.manner
-contingency	eng.rst.gum	contingency.condition
-textual-organization	eng.rst.rstdt	organization
-temporal.synchronous	eng.rst.rstdt	temporal.synchrony
-hypophora	eng.rst.rstdt	qap.hypophora
-circumstance	eng.rst.rstdt	context
-joint	eng.rst.rstdt	expansion.conjunction
-condition	eng.rst.rstdt	contingency
-contrast	eng.rst.rstdt	expansion.substitution
-enablement	eng.rst.rstdt	contingency.purpose
-joint	eng.rst.rstdt	joint-disjunction
-cause	eng.rst.rstdt	contingency.cause
-condition	eng.rst.rstdt	conditional
-condition	eng.rst.rstdt	contingency.negative-condition
-contrast	eng.rst.rstdt	unless
-comparison	eng.rst.rstdt	comparison.contrast
-cause	eng.rst.rstdt	causal
-topic-comment	eng.rst.rstdt	solutionhood
-elaboration	eng.rst.rstdt	expansion.level
-summary	eng.rst.rstdt	parenthetical
-elaboration	eng.rst.rstdt	expansion.instantiation
-elaboration	eng.rst.rstdt	elab-addition
-adversative	eng.rst.rstdt	otherwise
-comparison	eng.rst.rstdt	comparison.concession
-comparison	eng.rst.rstdt	comparison.similarity
-temporal	eng.rst.rstdt	temporal.asynchronous
-expansion.restatement	eng.rst.rstdt	restatement
-background	eng.rst.rstdt	bg-general
-manner-means	eng.rst.rstdt	expansion.manner
-condition	eng.rst.rstdt	contingency.condition
-enablement	eng.rst.rstdt	purpose
-Acknowledgement	eng.sdrt.stac	acknowledgement
-Alternation	eng.sdrt.stac	expansion.alternative
-Background	eng.sdrt.stac	background
-Clarification_question	eng.sdrt.stac	clarification_question
-Comment	eng.sdrt.stac	comment
-Conditional	eng.sdrt.stac	condition
-Continuation	eng.sdrt.stac	continuation
-Contrast	eng.sdrt.stac	contrast
-Correction	eng.sdrt.stac	expansion.correction
-Elaboration	eng.sdrt.stac	elaboration
-Explanation	eng.sdrt.stac	explanation
-Narration	eng.sdrt.stac	narration
-Parallel	eng.sdrt.stac	parallel
-Q_Elab	eng.sdrt.stac	q_elab
-Question_answer_pair	eng.sdrt.stac	question_answer_pair
-Result	eng.sdrt.stac	result
-Acknowledgement	eng.sdrt.stac	attribution
-Elaboration	eng.sdrt.stac	context
-Continuation	eng.sdrt.stac	contingency
-Question_answer_pair	eng.sdrt.stac	qap
-Elaboration	eng.sdrt.stac	contingency.cause
-Conditional	eng.sdrt.stac	conditional
-Continuation	eng.sdrt.stac	joint
-Contrast	eng.sdrt.stac	unless
-Continuation	eng.sdrt.stac	textual-organization
-Result	eng.sdrt.stac	causal
-Elaboration	eng.sdrt.stac	evaluation
-Continuation	eng.sdrt.stac	expansion.level
-Contrast	eng.sdrt.stac	otherwise
-Elaboration	eng.sdrt.stac	temporal.asynchronous
-Correction	eng.sdrt.stac	restatement
-Conditional	eng.sdrt.stac	contingency.condition
-disjunction	eus.rst.ert	expansion.disjunction
-evidence	eus.rst.ert	explanation-evidence
-justify	eus.rst.ert	explanation-justify
-list	eus.rst.ert	joint-list
-motivation	eus.rst.ert	explanation-motivation
-otherwise	eus.rst.ert	adversative
-restatement	eus.rst.ert	expansion.restatement
-sequence	eus.rst.ert	joint-sequence
-unconditional	eus.rst.ert	expansion.disjunction
-unless	eus.rst.ert	contrast
-condition	eus.rst.ert	conditional
-preparation	eus.rst.ert	attribution
-result	eus.rst.ert	findings
-background	eus.rst.ert	bg-general
-expansion.disjunction	eus.rst.ert	joint-disjunction
-conjunction	eus.rst.ert	expansion.conjunction
-cause	eus.rst.ert	causation
-background	eus.rst.ert	bg-general
-condition	fas.rst.prstc	conditional
-elaboration	fas.rst.prstc	preparation
-contrast	fas.rst.prstc	unless
-joint	fas.rst.prstc	expansion.conjunction
-background	fas.rst.prstc	bg-general
-cause	fas.rst.prstc	causation
-alternation	fra.sdrt.annodis	expansion.alternative
-conditional	fra.sdrt.annodis	condition
-goal	fra.sdrt.annodis	purpose-goal
-elaboration	fra.sdrt.annodis	joint
-contrast	fra.sdrt.annodis	unless
-e-elaboration	fra.sdrt.annodis	purpose
-Comparison	ita.pdtb.luna	comparison
-Comparison.Concession	ita.pdtb.luna	comparison.concession
-Comparison.Contrast	ita.pdtb.luna	comparison.contrast
-Contingency.Cause	ita.pdtb.luna	contingency.cause
-Contingency.Condition	ita.pdtb.luna	contingency.condition
-Contingency.Goal	ita.pdtb.luna	contingency.goal
-Expansion.Alternative	ita.pdtb.luna	expansion.alternative
-Expansion.Conjunction	ita.pdtb.luna	expansion.conjunction
-Expansion.Instantiation	ita.pdtb.luna	expansion.instantiation
-Expansion.Restatement	ita.pdtb.luna	expansion.restatement
-Interrupted	ita.pdtb.luna	interrupted
-Repetition	ita.pdtb.luna	restatement-repetition
-Temporal.Asynchronous	ita.pdtb.luna	temporal.asynchronous
-Temporal.Synchrony	ita.pdtb.luna	temporal.synchronous
-Temporal.Synchrony	ita.pdtb.luna	temporal.synchrony
-Expansion.Conjunction	ita.pdtb.luna	joint
-Contingency.Cause	ita.pdtb.luna	contingency.purpose
-Expansion.Restatement	ita.pdtb.luna	restatement
-Expansion.Disjunction	ita.pdtb.luna	joint-disjunction
-disjunction	nld.rst.nldt	expansion.disjunction
-evidence	nld.rst.nldt	explanation-evidence
-justify	nld.rst.nldt	explanation-justify
-list	nld.rst.nldt	joint-list
-motivation	nld.rst.nldt	explanation-motivation
-otherwise	nld.rst.nldt	adversative
-restatement	nld.rst.nldt	expansion.restatement
-sequence	nld.rst.nldt	joint-sequence
-unconditional	nld.rst.nldt	expansion.disjunction
-unless	nld.rst.nldt	contrast
-condition	nld.rst.nldt	conditional
-background	nld.rst.nldt	bg-general
-conjunction	nld.rst.nldt	expansion.conjunction
-disjunction	nld.rst.nldt	joint-disjunction
-Comparison	por.pdtb.crpc	comparison
-Comparison.Concession	por.pdtb.crpc	comparison.concession
-Comparison.Contrast	por.pdtb.crpc	comparison.contrast
-Comparison.Similarity	por.pdtb.crpc	comparison.similarity
-Contingency.Cause	por.pdtb.crpc	contingency.cause
-Contingency.Condition	por.pdtb.crpc	contingency.condition
-Contingency.Negative	por.pdtb.crpc	contingency.negative
-Contingency.Purpose	por.pdtb.crpc	contingency.purpose
-Expansion.Conjunction	por.pdtb.crpc	expansion.conjunction
-Expansion.Disjunction	por.pdtb.crpc	expansion.disjunction
-Expansion.Equivalence	por.pdtb.crpc	expansion.equivalence
-Expansion.Exception	por.pdtb.crpc	expansion.exception
-Expansion.Instantiation	por.pdtb.crpc	expansion.instantiation
-Expansion.Level	por.pdtb.crpc	expansion.level-of-detail
-Expansion.Manner	por.pdtb.crpc	expansion.manner
-Expansion.Substitution	por.pdtb.crpc	expansion.substitution
-Hypophora	por.pdtb.crpc	hypophora
-QAP	por.pdtb.crpc	question_answer_pair
-QAP.Hypophora	por.pdtb.crpc	hypophora
-Temporal	por.pdtb.crpc	temporal
-Temporal.Asynchronous	por.pdtb.crpc	temporal.asynchronous
-Temporal.Synchronous	por.pdtb.crpc	temporal.synchronous
-Acknowledgement	por.pdtb.crpc	acknowledgement
-Temporal.Synchronous	por.pdtb.crpc	temporal.synchrony
-Expansion.Conjunction	por.pdtb.crpc	preparation
-Expansion.Conjunction	por.pdtb.crpc	list
-Contingency.Condition	por.pdtb.crpc	circumstance
-Expansion.Disjunction	por.pdtb.crpc	joint-disjunction
-Expansion.Conjunction	por.pdtb.crpc	joint
-Expansion.Level	por.pdtb.crpc	textual-organization
-Expansion.Conjunction	por.pdtb.crpc	elaboration
-Expansion.Level	por.pdtb.crpc	expansion.level
-Expansion.Level	por.pdtb.crpc	parenthetical
-Contingency.Purpose	por.pdtb.crpc	purpose
-Comparison.Concession	por.pdtb.tedm	comparison.concession
-Comparison.Contrast	por.pdtb.tedm	comparison.contrast
-Comparison.Similarity	por.pdtb.tedm	comparison.similarity
-Contingency.Cause	por.pdtb.tedm	contingency.cause
-Contingency.Cause+Belief	por.pdtb.tedm	contingency.cause+belief
-Contingency.Condition	por.pdtb.tedm	contingency.condition
-Contingency.Condition+SpeechAct	por.pdtb.tedm	contingency.condition+speechact
-Contingency.Purpose	por.pdtb.tedm	contingency.purpose
-Expansion.Conjunction	por.pdtb.tedm	expansion.conjunction
-Expansion.Disjunction	por.pdtb.tedm	expansion.disjunction
-Expansion.Equivalence	por.pdtb.tedm	expansion.equivalence
-Expansion.Instantiation	por.pdtb.tedm	expansion.instantiation
-Expansion.Level-of-detail	por.pdtb.tedm	expansion.level-of-detail
-Expansion.Manner	por.pdtb.tedm	expansion.manner
-Expansion.Substitution	por.pdtb.tedm	expansion.substitution
-Hypophora	por.pdtb.tedm	hypophora
-Temporal.Asynchronous	por.pdtb.tedm	temporal.asynchronous
-Temporal.Synchronous	por.pdtb.tedm	temporal.synchronous
-Contingency.Cause	por.pdtb.tedm	explanation
-Expansion.Level-of-detail	por.pdtb.tedm	expansion.level
-Temporal.Synchronous	por.pdtb.tedm	temporal.synchrony
-Expansion.Level-of-detail	por.pdtb.tedm	elaboration
-Expansion.Disjunction	por.pdtb.tedm	joint-disjunction
-Expansion.Conjunction	por.pdtb.tedm	list
-Contingency.Purpose	por.pdtb.tedm	purpose
-evidence	por.rst.cstn	explanation-evidence
-justify	por.rst.cstn	explanation-justify
-list	por.rst.cstn	joint-list
-motivation	por.rst.cstn	explanation-motivation
-otherwise	por.rst.cstn	adversative
-restatement	por.rst.cstn	expansion.restatement
-sequence	por.rst.cstn	joint-sequence
-elaboration	por.rst.cstn	expansion.level
-condition	por.rst.cstn	conditional
-circumstance	por.rst.cstn	temporal.asynchronous
-list	por.rst.cstn	expansion.conjunction
-elaboration	por.rst.cstn	contingency.cause
-evidence	rus.rst.rrt	explanation-evidence
-motivation	rus.rst.rrt	explanation-motivation
-restatement	rus.rst.rrt	expansion.restatement
-sequence	rus.rst.rrt	joint-sequence
-condition	rus.rst.rrt	conditional
-contrast	rus.rst.rrt	unless
-joint	rus.rst.rrt	expansion.conjunction
-background	rus.rst.rrt	bg-general
-cause	rus.rst.rrt	causation
-alternative	spa.rst.rststb	expansion.alternative
-disjunction	spa.rst.rststb	expansion.disjunction
-evidence	spa.rst.rststb	explanation-evidence
-justify	spa.rst.rststb	explanation-justify
-list	spa.rst.rststb	joint-list
-motivation	spa.rst.rststb	explanation-motivation
-restatement	spa.rst.rststb	expansion.restatement
-sequence	spa.rst.rststb	joint-sequence
-unless	spa.rst.rststb	contrast
-condition	spa.rst.rststb	conditional
-means	spa.rst.rststb	manner-means
-result	spa.rst.rststb	findings
-background	spa.rst.rststb	bg-general
-cause	spa.rst.rststb	causation
-disjunction	spa.rst.sctb	expansion.disjunction
-evidence	spa.rst.sctb	explanation-evidence
-justify	spa.rst.sctb	explanation-justify
-list	spa.rst.sctb	joint-list
-motivation	spa.rst.sctb	explanation-motivation
-restatement	spa.rst.sctb	expansion.restatement
-sequence	spa.rst.sctb	joint-sequence
-condition	spa.rst.sctb	conditional
-result	spa.rst.sctb	findings
-list	spa.rst.sctb	joint
-contrast	spa.rst.sctb	unless
-background	spa.rst.sctb	bg-general
-Comparison.Concession	tha.pdtb.tdtb	comparison.concession
-Comparison.Contrast	tha.pdtb.tdtb	comparison.contrast
-Comparison.Similarity	tha.pdtb.tdtb	comparison.similarity
-Contingency.Cause	tha.pdtb.tdtb	contingency.cause
-Contingency.Cause+Belief	tha.pdtb.tdtb	contingency.cause+belief
-Contingency.Cause+SpeechAct	tha.pdtb.tdtb	contingency.cause+speechact
-Contingency.Condition	tha.pdtb.tdtb	contingency.condition
-Contingency.Negative-Condition	tha.pdtb.tdtb	contingency.negative-condition
-Contingency.Negative-Condition+SpeechAct	tha.pdtb.tdtb	contingency.negative-condition+speechact
-Contingency.Purpose	tha.pdtb.tdtb	contingency.purpose
-Expansion.Conjunction	tha.pdtb.tdtb	expansion.conjunction
-Expansion.Disjunction	tha.pdtb.tdtb	expansion.disjunction
-Expansion.Equivalence	tha.pdtb.tdtb	expansion.equivalence
-Expansion.Exception	tha.pdtb.tdtb	expansion.exception
-Expansion.GenExpansion	tha.pdtb.tdtb	expansion
-Expansion.Instantiation	tha.pdtb.tdtb	expansion.instantiation
-Expansion.Level-of-detail	tha.pdtb.tdtb	expansion.level-of-detail
-Expansion.Substitution	tha.pdtb.tdtb	expansion.substitution
-Temporal.Asynchronous	tha.pdtb.tdtb	temporal.asynchronous
-Temporal.Synchronous	tha.pdtb.tdtb	temporal.synchronous
-Temporal.Synchronous	tha.pdtb.tdtb	temporal.synchrony
-Expansion.GenExpansion	tha.pdtb.tdtb	expansion.genexpansion
-Expansion.Disjunction	tha.pdtb.tdtb	joint-disjunction
-Comparison.Concession	tur.pdtb.tdb	comparison.concession
-Comparison.Concession+SpeechAct	tur.pdtb.tdb	comparison.concession+speechact
-Comparison.Contrast	tur.pdtb.tdb	comparison.contrast
-Comparison.Degree	tur.pdtb.tdb	comparison.degree
-Comparison.Similarity	tur.pdtb.tdb	comparison.similarity
-Contingency.Cause	tur.pdtb.tdb	contingency.cause
-Contingency.Cause+Belief	tur.pdtb.tdb	contingency.cause+belief
-Contingency.Cause+SpeechAct	tur.pdtb.tdb	contingency.cause+speechact
-Contingency.Condition	tur.pdtb.tdb	contingency.condition
-Contingency.Negative-condition	tur.pdtb.tdb	contingency.negative-condition
-Contingency.Purpose	tur.pdtb.tdb	contingency.purpose
-Expansion.Conjunction	tur.pdtb.tdb	expansion.conjunction
-Expansion.Correction	tur.pdtb.tdb	expansion.correction
-Expansion.Disjunction	tur.pdtb.tdb	expansion.disjunction
-Expansion.Equivalence	tur.pdtb.tdb	expansion.equivalence
-Expansion.Exception	tur.pdtb.tdb	expansion.exception
-Expansion.Instantiation	tur.pdtb.tdb	expansion.instantiation
-Expansion.Level-of-detail	tur.pdtb.tdb	expansion.level-of-detail
-Expansion.Manner	tur.pdtb.tdb	expansion.manner
-Expansion.Substitution	tur.pdtb.tdb	expansion.substitution
-Hypophora	tur.pdtb.tdb	hypophora
-Temporal.Asynchronous	tur.pdtb.tdb	temporal.asynchronous
-Temporal.Synchronous	tur.pdtb.tdb	temporal.synchronous
-Expansion.Level-of-detail	tur.pdtb.tdb	expansion.level
-Temporal.Synchronous	tur.pdtb.tdb	temporal.synchrony
-Hypophora	tur.pdtb.tdb	qap.hypophora
-Expansion.Disjunction	tur.pdtb.tdb	joint-disjunction
-Comparison.Concession	tur.pdtb.tedm	comparison.concession
-Comparison.Concession+SpeechAct	tur.pdtb.tedm	comparison.concession+speechact
-Comparison.Contrast	tur.pdtb.tedm	comparison.contrast
-Comparison.Similarity	tur.pdtb.tedm	comparison.similarity
-Contingency.Cause	tur.pdtb.tedm	contingency.cause
-Contingency.Cause+Belief	tur.pdtb.tedm	contingency.cause+belief
-Contingency.Cause+SpeechAct	tur.pdtb.tedm	contingency.cause+speechact
-Contingency.Condition	tur.pdtb.tedm	contingency.condition
-Contingency.Negative-condition	tur.pdtb.tedm	contingency.negative-condition
-Contingency.Purpose	tur.pdtb.tedm	contingency.purpose
-Expansion	tur.pdtb.tedm	expansion
-Expansion.Conjunction	tur.pdtb.tedm	expansion.conjunction
-Expansion.Disjunction	tur.pdtb.tedm	expansion.disjunction
-Expansion.Equivalence	tur.pdtb.tedm	expansion.equivalence
-Expansion.Exception	tur.pdtb.tedm	expansion.exception
-Expansion.Instantiation	tur.pdtb.tedm	expansion.instantiation
-Expansion.Level-of-detail	tur.pdtb.tedm	expansion.level-of-detail
-Expansion.Manner	tur.pdtb.tedm	expansion.manner
-Expansion.Substitution	tur.pdtb.tedm	expansion.substitution
-Hypophora	tur.pdtb.tedm	hypophora
-Temporal.Asynchronous	tur.pdtb.tedm	temporal.asynchronous
-Temporal.Synchronous	tur.pdtb.tedm	temporal.synchronous
-Expansion.Level-of-detail	tur.pdtb.tedm	expansion.level
-Temporal.Synchronous	tur.pdtb.tedm	temporal.synchrony
-Expansion.Disjunction	tur.pdtb.tedm	joint-disjunction
-bg-general	zho.dep.scidtb	background
-exp-evidence	zho.dep.scidtb	explanation-evidence
-ROOT	zho.dep.scidtb	root
-condition	zho.dep.scidtb	conditional
-temporal	zho.dep.scidtb	context-circumstance
-contrast	zho.dep.scidtb	unless
-expansion	zho.dep.scidtb	expansion.genexpansion
-elab-addition	zho.dep.scidtb	elaboration
-Alternative	zho.pdtb.cdtb	expansion.alternative
-Causation	zho.pdtb.cdtb	cause
-Conditional	zho.pdtb.cdtb	condition
-Conjunction	zho.pdtb.cdtb	conjunction
-Contrast	zho.pdtb.cdtb	contrast
-Expansion	zho.pdtb.cdtb	expansion
-Progression	zho.pdtb.cdtb	progression
-Purpose	zho.pdtb.cdtb	purpose
-Temporal	zho.pdtb.cdtb	temporal
-Progression	zho.pdtb.cdtb	topic-question
-Conjunction	zho.pdtb.cdtb	list
-Conditional	zho.pdtb.cdtb	contingency-condition
-Conjunction	zho.pdtb.cdtb	expansion.conjunction
-Causation	zho.pdtb.cdtb	causation
-Expansion	zho.pdtb.cdtb	organization-heading
-Expansion	zho.pdtb.cdtb	elaboration-additional
-Purpose	zho.pdtb.cdtb	attribution-positive
-Contrast	zho.pdtb.cdtb	joint-disjunction
-Conditional	zho.pdtb.cdtb	conditional
-Conditional	zho.pdtb.cdtb	context-circumstance
-Conditional	zho.pdtb.cdtb	causal-cause
-Contrast	zho.pdtb.cdtb	unless
-Conjunction	zho.pdtb.cdtb	expansion.genexpansion
-Contrast	zho.pdtb.cdtb	comparison.contrast
-Conjunction	zho.pdtb.cdtb	elaboration
-Conjunction	zho.pdtb.cdtb	sequence
-Expansion	zho.pdtb.cdtb	exp-evidence
-Contrast	zho.pdtb.cdtb	adversative-contrast
-joint-disjunction	zho.rst.gcdt	expansion.disjunction
-attribution-positive	zho.rst.gcdt	attribution
-joint-list	zho.rst.gcdt	list
-joint-list	zho.rst.gcdt	expansion.conjunction
-cause	zho.rst.gcdt	causation
-causal-result	zho.rst.gcdt	enablement
-purpose-goal	zho.rst.gcdt	goal
-condition	zho.rst.gcdt	conditional
-adversative-concession	zho.rst.gcdt	concession
-contrast	zho.rst.gcdt	unless
-elaboration-additional	zho.rst.gcdt	expansion.genexpansion
-elaboration-additional	zho.rst.gcdt	elaboration
-adversative-antithesis	zho.rst.gcdt	comparison
-elaboration-additional	zho.rst.gcdt	elab-addition
-joint-sequence	zho.rst.gcdt	sequence
-explanation-evidence	zho.rst.gcdt	exp-evidence
-joint-other	zho.rst.gcdt	temporal
-purpose-goal	zho.rst.gcdt	purpose
-disjunction	zho.rst.sctb	expansion.disjunction
-evidence	zho.rst.sctb	explanation-evidence
-justify	zho.rst.sctb	explanation-justify
-list	zho.rst.sctb	joint-list
-motivation	zho.rst.sctb	explanation-motivation
-restatement	zho.rst.sctb	expansion.restatement
-sequence	zho.rst.sctb	joint-sequence
-condition	zho.rst.sctb	conditional
-circumstance	zho.rst.sctb	context-circumstance
-means	zho.rst.sctb	manner-means
-result	zho.rst.sctb	findings
-elaboration	zho.rst.sctb	elaboration-attribute
-list	zho.rst.sctb	joint
-contrast	zho.rst.sctb	unless
-evidence	zho.rst.sctb	exp-evidence
-condition	zho.rst.sctb	contingency-condition
-preparation	zho.rst.sctb	organization-heading
-elaboration	zho.rst.sctb	expansion.genexpansion
-elaboration	zho.rst.sctb	joint-disjunction
-elaboration	zho.rst.sctb	expansion.conjunction
-cause	zho.rst.sctb	causation
-elaboration	zho.rst.sctb	elaboration-additional
-evidence	deu.rst.pcc	exp-evidence
-BACKGROUND	eng.dep.covdtb	bg-goal
-COMPARISON	eng.dep.covdtb	comparison.similarity
-CONDITION	eng.dep.covdtb	contingency
-ELABORATION	eng.dep.covdtb	elab-enumember
-ELABORATION	eng.dep.covdtb	elab-process_step
-CAUSE-RESULT	eng.dep.covdtb	exp-reason
-CAUSE-RESULT	eng.dep.covdtb	findings
-TEXTUAL-ORGANIZATION	eng.dep.covdtb	preparation
-ELABORATION	eng.dep.covdtb	summary
-Comparison.Concession	eng.pdtb.pdtb	adversative
-Temporal.Synchronous	eng.pdtb.pdtb	bg-general
-Hypophora	eng.pdtb.pdtb	clarification_question
-Temporal.Asynchronous	eng.pdtb.pdtb	temporal
-Expansion.Conjunction	eng.pdtb.tedm	attribution
-Temporal.Asynchronous	eng.pdtb.tedm	continuation
-Expansion.Conjunction	eng.pdtb.tedm	evaluation
-evaluation	eng.rst.gum	comment
-elaboration-additional	eng.rst.gum	elab-enumember
-causal-cause	eng.rst.gum	exp-reason
-organization-preparation	eng.rst.gum	preparation
-contrast	eng.rst.rstdt	adversative
-cause	eng.rst.rstdt	causation
-background	eng.rst.rstdt	circumstance
-evaluation	eng.rst.rstdt	comment
-summary	eng.rst.rstdt	expansion.restatement
-topic-comment	eng.rst.rstdt	hypophora
-means	eng.rst.rstdt	mode
-topic-comment	eng.rst.rstdt	qap
-temporal	eng.rst.rstdt	temporal.synchronous
-temporal	eng.rst.rstdt	temporal.asynchronous
-topic-comment	eng.rst.rstdt	topic
-Correction	eng.sdrt.stac	expansion.conjunction
-Continuation	eng.sdrt.stac	expansion.instantiation
-Result	eng.sdrt.stac	findings
-Question_answer_pair	eng.sdrt.stac	topic
-background	fra.sdrt.annodis	bg-general
-result	fra.sdrt.annodis	findings
-Interrupted	ita.pdtb.luna	comment
-Comparison	ita.pdtb.luna	comparison.similarity
-Expansion.Resttatement	ita.pdtb.luna	Expansion.Disjunction
-Expansion.Conjunction	ita.pdtb.luna	expansion.level
-Contingency.Goal	ita.pdtb.luna	purpose
-Repetition	ita.pdtb.luna	repetition
-Expansion.Conjunction	por.pdtb.crpc	comment
-Expansion.Conjunction	por.pdtb.crpc	sequence
-Comparison.Contrast	por.pdtb.tedm	unless
-Comparison.Contrast	por.rst.cstn	unless
-evidence	rus.rst.rrt	exp-evidence
-cause	spa.rst.sctb	causation
-result	zho.dep.scidtb	findings
-Contrast	zho.pdtb.cdtb	adversative-concession
-Conditional	zho.pdtb.cdtb	context-background
-Purpose	zho.pdtb.cdtb	goal
-context-background	zho.rst.gcdt	bg-general
-causal-cause	zho.rst.gcdt	cause
-contingency-condition	zho.rst.gcdt	condition
-adversative-contrast	zho.rst.gcdt	contrast
-background	zho.rst.sctb	bg-general
-elaboration	zho.rst.sctb	elab-addition
-purpose	zho.rst.sctb	goal
-means	zho.rst.sctb	mode-means
-result	deu.rst.pcc	findings
-BACKGROUND	eng.dep.covdtb	bg-compare
-ELABORATION	eng.dep.covdtb	elab-example
-CONDITION	eng.dep.covdtb	contingency.negative-condition
-bg-goal	eng.dep.scidtb	contingency.purpose
-cause	eng.dep.scidtb	causal
-manner-means	eng.dep.scidtb	MANNER-MEANS
-organization	eng.dep.scidtb	textual-organization
-result	eng.dep.scidtb	findings
-Expansion.Substitution	eng.pdtb.pdtb	restatement
-Contingency.Condition	eng.pdtb.pdtb	Acknowledgement
-Expansion.Conjunction	eng.pdtb.pdtb	evaluation
-Contingency.Cause	eng.pdtb.pdtb	causation
-Expansion.Conjunction	eng.pdtb.pdtb	textual-organization
-expansion.restatement	eng.pdtb.tedm	restatement
-Contingency.Cause	eng.pdtb.tedm	findings
-Hypophora	eng.pdtb.tedm	qap.hypophora
-elaboration	eng.rst.gum	elaboration-additional
-topic	eng.rst.gum	topic-change
-result	eng.rst.gum	findings
-elaboration	eng.rst.rstdt	expansion.equivalence
-textual-organization	eng.rst.rstdt	continuation
-manner-means	eng.rst.rstdt	means
-Alternation	eng.sdrt.stac	alternative
-Correction	eng.sdrt.stac	expansion.substitution
-Background	eng.sdrt.stac	bg-general
-joint	fas.rst.prstc	textual-organization
-frame	fra.sdrt.annodis	expansion.manner
-organization	fra.sdrt.annodis	textual-organization
-Expansion.Conjunction	ita.pdtb.luna	expansion.equivalence
-Contingency.Cause	ita.pdtb.luna	attribution
-Expansion.Restatement	ita.pdtb.luna	expansion.substitution
-Expansion.Conjunction	ita.pdtb.luna	sequence
-cause	nld.rst.nldt	causation
-Contingency.Cause	por.pdtb.crpc	nonvolitional-cause
-Temporal	por.pdtb.crpc	continuation
-Comparison.Contrast	por.pdtb.tedm	comparison
-Expansion.Conjunction	por.pdtb.tedm	sequence
-parenthetical	por.rst.cstn	context
-cause-effect	rus.rst.rrt	cause-result
-Expansion.Conjunction	spa.rst.rststb	comparison
-contrast	spa.rst.sctb	comparison
-Expansion.Level-of-detail	tur.pdtb.tdb	restatement
-manner-means	zho.dep.scidtb	mode-means
-Conjunction	zho.pdtb.cdtb	joint-other
-Conjunction	zho.pdtb.cdtb	evaluation
-Conjunction	zho.pdtb.cdtb	evaluation-comment
-Causation	zho.pdtb.cdtb	mode-means
-Causation	zho.pdtb.cdtb	mode-manner
-Expansion	zho.pdtb.cdtb	organization-preparation
-result	zho.rst.gcdt	findings
-cause	zho.rst.sctb	causal-result
-preparation	zho.rst.sctb	organization-preparation
-TEMPORAL	eng.dep.covdtb	progression
-TEMPORAL	eng.dep.covdtb	continuation
-TEXTUAL-ORGANIZATION	eng.dep.scidtb	organization
-Expansion.Level-of-detail	eng.pdtb.pdtb	expansion.level-of-detail
-Expansion.Disjunction	eng.pdtb.pdtb	expansion.disjunction
-Expansion.Manner	eng.pdtb.pdtb	manner-means
-Comparison.Contrast	eng.pdtb.pdtb	adversative
-organization	eng.rst.gum	summary
-restatement	eng.rst.gum	alternative
-elaboration	eng.rst.gum	elab-process_step
-elaboration	eng.rst.gum	elaboration-additional
-causal	eng.rst.gum	result
-restatement	eng.rst.gum	parallel
-comparison	eng.rst.rstdt	adversative
-temporal	eng.rst.rstdt	circumstance
-textual-organization	eng.rst.rstdt	expansion.restatement
-temporal	eng.rst.rstdt	temporal.synchronous
-elaboration	eng.rst.rstdt	hypophora
-manner-means	eng.rst.rstdt	means
-Correction	eng.sdrt.stac	correction
-disjunction	eus.rst.ert	expansion.disjunction
-evidence	eus.rst.ert	exp-evidence
-Expansion.Conjunction	ita.pdtb.luna	Expansion.Disjunction
-explanation-evidence	nld.rst.nldt	exp-evidence
-Expansion.Level	por.pdtb.crpc	interpretation
-Hypophora	por.pdtb.tedm	qap.hypophora
-Expansion.Conjunction	por.pdtb.tedm	joint
-evidence	por.rst.cstn	exp-evidence
-elaboration	rus.rst.rrt	elab-addition
-evidence	spa.rst.rststb	exp-evidence
-conjunction	spa.rst.rststb	Expansion.Conjunction
-list	spa.rst.sctb	joint-disjunction
-evidence	spa.rst.sctb	exp-evidence
-attribution	zho.dep.scidtb	attribution-positive
-joint	zho.dep.scidtb	expansion.conjunction
-joint	zho.dep.scidtb	list
-bg-general	zho.dep.scidtb	context-background
-explanation-justify	zho.pdtb.cdtb	justify
-contingency-condition	zho.rst.gcdt	condition
-explanation-justify	zho.rst.gcdt	justify
-causal-cause	zho.rst.gcdt	cause
-adversative-contrast	zho.rst.gcdt	contrast
-concession	zho.rst.sctb	adversative-concession
-contrast	zho.rst.sctb	adversative-contrast
-Expansion.Level-of-detail	tha.pdtb.tdtb	expansion.level
-volitional-cause	por.rst.cstn	causation
-comparison	por.rst.cstn	Comparison.Contrast
-Elaboration	eng.sdrt.stac	mode
-Elaboration	eng.sdrt.stac	expansion.manner
-list	zho.rst.sctb	joint-other
-evidence	nld.rst.nldt	explanation-evidence
-Comparison.Concession	por.pdtb.tedm	concession
-Expansion.Alternative	ita.pdtb.luna	alternative
-QAP	por.pdtb.crpc	qap
-QAP.Hypophora	por.pdtb.crpc	qap.hypophora
-manner-means	eng.dep.scidtb	expansion.manner
-joint-list	zho.rst.gcdt	joint
-continuation	fra.sdrt.annodis	expansion.conjunction
-alternation	fra.sdrt.annodis	alternative
-purpose	por.rst.cstn	contingency.purpose
-conjuntion	zho.rst.sctb	temporal
-contrast	nld.rst.nldt	comparison
-Hypophora	por.pdtb.tedm	qap
-Expansion	zho.pdtb.cdtb	explanation-justify
-Contrast	zho.pdtb.cdtb	adversative-antithesis
-Causation	zho.pdtb.cdtb	contingency.cause
-Expansion	zho.pdtb.cdtb	bg-general
-Temporal	zho.pdtb.cdtb	temporal.synchrony
-Expansion.Level-of-detail	eng.pdtb.tedm	expansion.restatement
-Hypophora	eng.pdtb.tedm	qap
-causal	eng.rst.gum	causal-cause
-elaboration	eng.rst.gum	elab-example
-joint	eng.rst.gum	joint-disjunction
-organization	eng.rst.gum	organization-preparation
-FINDINGS	eng.dep.covdtb	exp-evidence
-joint	eng.dep.scidtb	joint-disjunction
-condition	eng.dep.scidtb	contingency.condition
-attribution	eng.rst.gum	attribution-positive
\ No newline at end of file
diff --git a/mappings/mappings-classes-braud.tsv b/mappings/mappings-classes-braud.tsv
deleted file mode 100644
index f4ca27dfd29916320dc5f7b4662c0cecc2ca4a64..0000000000000000000000000000000000000000
--- a/mappings/mappings-classes-braud.tsv
+++ /dev/null
@@ -1,163 +0,0 @@
-LABEL	CLASS	MAPPING
-adversative	contrast	1
-adversative-antithesis	contrast	1
-adversative-concession	contrast	1
-adversative-contrast	contrast	1
-alternative	condition	10
-antithesis	contrast	1
-attribution	attribution	2
-attribution-negative	attribution	2
-attribution-positive	attribution	2
-background	background	8
-causal	cause	6
-causal-cause	cause	6
-causal-result	cause	6
-cause	cause	6
-cause-effect	cause	6
-circumstance	background	8
-comparison	comparison	12
-concession	contrast	1
-conclusion	evaluation	13
-condition	condition	10
-conjunction	joint	16
-context	explanation	15
-context-background	background	8
-context-circumstance	background	8
-contingency	condition	10
-contingency-condition	condition	10
-contrast	contrast	1
-disjunction	same-unit	11
-e-elaboration	elaboration	3
-effect	cause	6
-elaboration	elaboration	3
-elaboration-additional	elaboration	3
-elaboration-attribute	elaboration	3
-enablement	enablement	14
-evaluation	evaluation	13
-evaluation-comment	evaluation	13
-evaluation-n	evaluation	13
-evaluation-s	evaluation	13
-evidence	explanation	15
-explanation	explanation	15
-explanation-evidence	explanation	15
-explanation-justify	explanation	15
-explanation-motivation	explanation	15
-interpretation	evaluation	13
-interpretation-evaluation	evaluation	13
-joint	joint	16
-joint-disjunction	joint	16
-joint-list	joint	16
-joint-other	joint	16
-joint-sequence	temporal	17
-justify	explanation	15
-list	joint	16
-manner-means	manner-means	4
-means	manner-means	4
-mode	manner-means	4
-mode-manner	manner-means	4
-mode-means	manner-means	4
-motivation	explanation	15
-nonvolitional-cause	cause	6
-nonvolitional-cause-e	cause	6
-nonvolitional-result	cause	6
-nonvolitional-result-e	cause	6
-organization	textual-organization	0
-organization-heading	textual-organization	0
-organization-phatic	textual-organization	0
-organization-preparation	textual-organization	0
-otherwise	condition	10
-parenthetical	same-unit	11
-preparation	background	8
-purpose	enablement	14
-purpose-attribute	enablement	14
-purpose-goal	enablement	14
-reason	explanation	15
-restatement	summary	5
-restatement-mn	summary	5
-restatement-partial	summary	5
-restatement-repetition	summary	5
-result	cause	6
-sequence	temporal	17
-solutionhood	topic-comment	7
-summary	summary	5
-temporal	temporal	17
-textual-organization	textual-organization	0
-topic	topic-comment	7
-topic-change	topic-change	9
-topic-comment	topic-comment	7
-topic-drift	topic-change	9
-topic-question	topic-comment	7
-topic-solutionhood	topic-comment	7
-unconditional	condition	10
-unless	condition	10
-volitional-cause	cause	6
-volitional-result	cause	6
-causation	cause	6
-comparison.concession	contrast	1
-comparison.concession+speechact	comparison	12
-comparison.contrast	contrast	1
-comparison.degree	comparison	12
-comparison.similarity	comparison	12
-conditional	condition	10
-contingency.cause	condition	10
-contingency.cause+belief	condition	10
-contingency.cause+speechact	condition	10
-contingency.condition	condition	10
-contingency.condition+speechact	condition	10
-contingency.goal	condition	10
-contingency.negative-cause	cause	6
-contingency.negative-condition	condition	10
-contingency.purpose	enablement	14
-expansion	elaboration	3
-expansion.alternative	condition	10
-expansion.conjunction	joint	16
-expansion.correction	contrast	1
-expansion.disjunction	cause	6
-expansion.equivalence	comparison	12
-expansion.exception	contrast	1
-expansion.instantiation	elaboration	3
-expansion.level-of-detail	elaboration	3
-expansion.manner	manner-means	4
-expansion.restatement	summary	5
-expansion.substitution	contrast	1
-hypophora	topic-comment	7
-interrupted	topic-change	9
-progression	temporal	17
-repetition	elaboration	3
-temporal.asynchronous	temporal	17
-temporal.synchronous	temporal	17
-temporal.synchrony	temporal	17
-qap	topic-comment	7
-contingency.negative-condition+speechact	condition	10
-contingency.negative	condition	10
-expansion.genexpansion	elaboration	3
-expansion.level	elaboration	3
-qap.hypophora	topic-comment	7
-bg-compare	background	8
-bg-general	background	8
-bg-goal	background	8
-cause-result	cause	6
-elab-addition	elaboration	3
-elab-aspect	elaboration	3
-elab-definition	elaboration	3
-elab-enumember	elaboration	3
-elab-example	elaboration	3
-elab-process_step	elaboration	3
-exp-evidence	explanation	15
-exp-reason	explanation	15
-findings	cause	6
-acknowledgement	attribution	2
-alternation	condition	10
-clarification_question	topic-comment	7
-comment	evaluation	13
-continuation	joint	16
-correction	contrast	1
-explanation*	explanation	15
-flashback	explanation	15
-frame	explanation	15
-goal	enablement	14
-narration	elaboration	3
-parallel	joint	16
-q_elab	elaboration	3
-question_answer_pair	topic-comment	7
-temploc	temporal	17
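As a point of reference while reviewing the deleted mapping files: each row pairs a fine-grained DISRPT label with a coarse class and an integer index used as the classification target. Below is a minimal sketch of how such a TSV might be loaded and applied; the file path, the column names (LABEL, CLASS, MAPPING), and the helper name `load_mapping` are assumptions based on the table above, not code taken from the repository.

```python
import csv

def load_mapping(path):
    """Read a LABEL -> (CLASS, MAPPING) table from a tab-separated file."""
    label2class, label2id = {}, {}
    with open(path, newline='', encoding='utf-8') as f:
        reader = csv.DictReader(f, delimiter='\t')
        for row in reader:
            label2class[row['LABEL']] = row['CLASS']
            label2id[row['LABEL']] = int(row['MAPPING'])
    return label2class, label2id

# Hypothetical usage: collapse a fine-grained label to its coarse class and index.
label2class, label2id = load_mapping('mappings/mappings-classes-braud.tsv')
print(label2class['elab-example'], label2id['elab-example'])  # elaboration 3
```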
diff --git a/mappings/mappings-classes-rst.tsv b/mappings/mappings-classes-rst.tsv
deleted file mode 100644
index 8938d6b4e12bbe7ff1f0cd1b474c9fa5da1518bf..0000000000000000000000000000000000000000
--- a/mappings/mappings-classes-rst.tsv
+++ /dev/null
@@ -1,163 +0,0 @@
-LABEL	CLASS	MAPPING
-adversative	contrast	1
-adversative-antithesis	contrast	1
-adversative-concession	contrast	1
-adversative-contrast	contrast	1
-alternative	condition	10
-antithesis	contrast	1
-attribution	attribution	2
-attribution-negative	attribution	2
-attribution-positive	attribution	2
-background	background	8
-causal	cause	6
-causal-cause	cause	6
-causal-result	cause	6
-cause	cause	6
-cause-effect	cause	6
-circumstance	background	8
-comparison	comparison	11
-concession	contrast	1
-conclusion	evaluation	12
-condition	condition	10
-conjunction	joint	15
-context	background	8
-context-background	background	8
-context-circumstance	background	8
-contingency	condition	10
-contingency-condition	condition	10
-contrast	contrast	1
-disjunction	joint	15
-e-elaboration	elaboration	3
-effect	cause	6
-elaboration	elaboration	3
-elaboration-additional	elaboration	3
-elaboration-attribute	elaboration	3
-enablement	enablement	13
-evaluation	evaluation	12
-evaluation-comment	evaluation	12
-evaluation-n	evaluation	12
-evaluation-s	evaluation	12
-evidence	explanation	14
-explanation	explanation	14
-explanation-evidence	explanation	14
-explanation-justify	explanation	14
-explanation-motivation	explanation	14
-interpretation	evaluation	12
-interpretation-evaluation	evaluation	12
-joint	joint	15
-joint-disjunction	joint	15
-joint-list	joint	15
-joint-other	joint	15
-joint-sequence	temporal	16
-justify	explanation	14
-list	joint	15
-manner-means	manner-means	4
-means	manner-means	4
-mode	manner-means	4
-mode-manner	manner-means	4
-mode-means	manner-means	4
-motivation	explanation	14
-nonvolitional-cause	cause	6
-nonvolitional-cause-e	cause	6
-nonvolitional-result	cause	6
-nonvolitional-result-e	cause	6
-organization	background	8
-organization-heading	background	8
-organization-phatic	background	8
-organization-preparation	background	8
-otherwise	condition	10
-parenthetical	elaboration	3
-preparation	background	8
-purpose	enablement	13
-purpose-attribute	enablement	13
-purpose-goal	enablement	13
-reason	explanation	14
-restatement	summary	5
-restatement-mn	summary	5
-restatement-partial	summary	5
-restatement-repetition	summary	5
-result	cause	6
-sequence	temporal	16
-solutionhood	topic-comment	7
-summary	summary	5
-temporal	temporal	16
-textual-organization	background	8
-topic	topic-comment	7
-topic-change	topic-change	9
-topic-comment	topic-comment	7
-topic-drift	topic change	0
-topic-question	topic-comment	7
-topic-solutionhood	topic-comment	7
-unconditional	condition	10
-unless	condition	10
-volitional-cause	cause	6
-volitional-result	cause	6
-causation	cause	6
-comparison.concession	contrast	1
-comparison.concession+speechact	comparison	11
-comparison.contrast	contrast	1
-comparison.degree	comparison	11
-comparison.similarity	comparison	11
-conditional	condition	10
-contingency.cause	condition	10
-contingency.cause+belief	condition	10
-contingency.cause+speechact	condition	10
-contingency.condition	condition	10
-contingency.condition+speechact	condition	10
-contingency.goal	condition	10
-contingency.negative-cause	cause	6
-contingency.negative-condition	condition	10
-contingency.purpose	enablement	13
-expansion	elaboration	3
-expansion.alternative	condition	10
-expansion.conjunction	joint	15
-expansion.correction	contrast	1
-expansion.disjunction	joint	15
-expansion.equivalence	comparison	11
-expansion.exception	contrast	1
-expansion.instantiation	elaboration	3
-expansion.level-of-detail	elaboration	3
-expansion.manner	manner-means	4
-expansion.restatement	summary	5
-expansion.substitution	contrast	1
-hypophora	topic-comment	7
-interrupted	topic-change	9
-progression	temporal	16
-repetition	elaboration	3
-temporal.asynchronous	temporal	16
-temporal.synchronous	temporal	16
-temporal.synchrony	temporal	16
-qap	topic-comment	7
-contingency.negative-condition+speechact	condition	10
-contingency.negative	condition	10
-expansion.genexpansion	elaboration	3
-expansion.level	elaboration	3
-qap.hypophora	topic-comment	7
-bg-compare	background	8
-bg-general	background	8
-bg-goal	background	8
-cause-result	cause	6
-elab-addition	elaboration	3
-elab-aspect	elaboration	3
-elab-definition	elaboration	3
-elab-enumember	elaboration	3
-elab-example	elaboration	3
-elab-process_step	elaboration	3
-exp-evidence	explanation	14
-exp-reason	explanation	14
-findings	cause	6
-acknowledgement	attribution	2
-alternation	condition	10
-clarification_question	topic-comment	7
-comment	evaluation	12
-continuation	joint	15
-correction	contrast	1
-explanation*	explanation	14
-flashback	explanation	14
-frame	explanation	14
-goal	enablement	13
-narration	elaboration	3
-parallel	joint	15
-q_elab	elaboration	3
-question_answer_pair	topic-comment	7
-temploc	temporal	16
diff --git a/mappings/substitions-classes-braud.tsv b/mappings/substitions-classes-braud.tsv
deleted file mode 100644
index ae4b1fd5d5249f2af6313cbf0b3fca01c4d287a0..0000000000000000000000000000000000000000
--- a/mappings/substitions-classes-braud.tsv
+++ /dev/null
@@ -1,163 +0,0 @@
-LABEL	CLASS
-adversative	contrast
-adversative-antithesis	contrast
-adversative-concession	contrast
-adversative-contrast	contrast
-alternative	condition
-antithesis	contrast
-attribution	attribution
-attribution-negative	attribution
-attribution-positive	attribution
-background	background
-causal	cause
-causal-cause	cause
-causal-result	cause
-cause	cause
-cause-effect	cause
-circumstance	background
-comparison	comparison
-concession	contrast
-conclusion	evaluation
-condition	condition
-conjunction	joint
-context	explanation
-context-background	background
-context-circumstance	background
-contingency	condition
-contingency-condition	condition
-contrast	contrast
-disjunction	same-unit
-e-elaboration	elaboration
-effect	cause
-elaboration	elaboration
-elaboration-additional	elaboration
-elaboration-attribute	elaboration
-enablement	enablement
-evaluation	evaluation
-evaluation-comment	evaluation
-evaluation-n	evaluation
-evaluation-s	evaluation
-evidence	explanation
-explanation	explanation
-explanation-evidence	explanation
-explanation-justify	explanation
-explanation-motivation	explanation
-interpretation	evaluation
-interpretation-evaluation	evaluation
-joint	joint
-joint-disjunction	joint
-joint-list	joint
-joint-other	joint
-joint-sequence	temporal
-justify	explanation
-list	joint
-manner-means	manner-means
-means	manner-means
-mode	manner-means
-mode-manner	manner-means
-mode-means	manner-means
-motivation	explanation
-nonvolitional-cause	cause
-nonvolitional-cause-e	cause
-nonvolitional-result	cause
-nonvolitional-result-e	cause
-organization	textual-organization
-organization-heading	textual-organization
-organization-phatic	textual-organization
-organization-preparation	textual-organization
-otherwise	condition
-parenthetical	same-unit
-preparation	background
-purpose	enablement
-purpose-attribute	enablement
-purpose-goal	enablement
-reason	explanation
-restatement	summary
-restatement-mn	summary
-restatement-partial	summary
-restatement-repetition	summary
-result	cause
-sequence	temporal
-solutionhood	topic-comment
-summary	summary
-temporal	temporal
-textual-organization	textual-organization
-topic	topic-comment
-topic-change	topic-change
-topic-comment	topic-comment
-topic-drift	topic-change
-topic-question	topic-comment
-topic-solutionhood	topic-comment
-unconditional	condition
-unless	condition
-volitional-cause	cause
-volitional-result	cause
-causation	cause
-comparison.concession	contrast
-comparison.concession+speechact	comparison
-comparison.contrast	contrast
-comparison.degree	comparison
-comparison.similarity	comparison
-conditional	condition
-contingency.cause	condition
-contingency.cause+belief	condition
-contingency.cause+speechact	condition
-contingency.condition	condition
-contingency.condition+speechact	condition
-contingency.goal	condition
-contingency.negative-cause	cause
-contingency.negative-condition	condition
-contingency.purpose	enablement
-expansion	elaboration
-expansion.alternative	condition
-expansion.conjunction	joint
-expansion.correction	contrast
-expansion.disjunction	cause
-expansion.equivalence	comparison
-expansion.exception	contrast
-expansion.instantiation	elaboration
-expansion.level-of-detail	elaboration
-expansion.manner	manner-means
-expansion.restatement	summary
-expansion.substitution	contrast
-hypophora	topic-comment
-interrupted	topic-change
-progression	temporal
-repetition	elaboration
-temporal.asynchronous	temporal
-temporal.synchronous	temporal
-temporal.synchrony	temporal
-qap	topic-comment
-contingency.negative-condition+speechact	condition
-contingency.negative	condition
-expansion.genexpansion	elaboration
-expansion.level	elaboration
-qap.hypophora	topic-comment
-bg-compare	background
-bg-general	background
-bg-goal	background
-cause-result	cause
-elab-addition	elaboration
-elab-aspect	elaboration
-elab-definition	elaboration
-elab-enumember	elaboration
-elab-example	elaboration
-elab-process_step	elaboration
-exp-evidence	explanation
-exp-reason	explanation
-findings	cause
-acknowledgement	attribution
-alternation	condition
-clarification_question	topic-comment
-comment	evaluation
-continuation	joint
-correction	contrast
-explanation*	explanation
-flashback	explanation
-frame	explanation
-goal	enablement
-narration	elaboration
-parallel	joint
-q_elab	elaboration
-question_answer_pair	topic-comment
-temploc	temporal
diff --git a/mappings/substitions-classes-rst.tsv b/mappings/substitions-classes-rst.tsv
deleted file mode 100644
index 64485b9c0d9fac1701d0d614846210762a872f74..0000000000000000000000000000000000000000
--- a/mappings/substitions-classes-rst.tsv
+++ /dev/null
@@ -1,163 +0,0 @@
-LABEL	CLASS
-adversative	contrast
-adversative-antithesis	contrast
-adversative-concession	contrast
-adversative-contrast	contrast
-alternative	condition
-antithesis	contrast
-attribution	attribution
-attribution-negative	attribution
-attribution-positive	attribution
-background	background
-causal	cause
-causal-cause	cause
-causal-result	cause
-cause	cause
-cause-effect	cause
-circumstance	background
-comparison	comparison
-concession	contrast
-conclusion	evaluation
-condition	condition
-conjunction	joint
-context	background
-context-background	background
-context-circumstance	background
-contingency	condition
-contingency-condition	condition
-contrast	contrast
-disjunction	joint
-e-elaboration	elaboration
-effect	cause
-elaboration	elaboration
-elaboration-additional	elaboration
-elaboration-attribute	elaboration
-enablement	enablement
-evaluation	evaluation
-evaluation-comment	evaluation
-evaluation-n	evaluation
-evaluation-s	evaluation
-evidence	explanation
-explanation	explanation
-explanation-evidence	explanation
-explanation-justify	explanation
-explanation-motivation	explanation
-interpretation	evaluation
-interpretation-evaluation	evaluation
-joint	joint
-joint-disjunction	joint
-joint-list	joint
-joint-other	joint
-joint-sequence	temporal
-justify	explanation
-list	joint
-manner-means	manner-means
-means	manner-means
-mode	manner-means
-mode-manner	manner-means
-mode-means	manner-means
-motivation	explanation
-nonvolitional-cause	cause
-nonvolitional-cause-e	cause
-nonvolitional-result	cause
-nonvolitional-result-e	cause
-organization	background
-organization-heading	background
-organization-phatic	background
-organization-preparation	background
-otherwise	condition
-parenthetical	elaboration
-preparation	background
-purpose	enablement
-purpose-attribute	enablement
-purpose-goal	enablement
-reason	explanation
-restatement	summary
-restatement-mn	summary
-restatement-partial	summary
-restatement-repetition	summary
-result	cause
-sequence	temporal
-solutionhood	topic-comment
-summary	summary
-temporal	temporal
-textual-organization	background
-topic	topic-comment
-topic-change	topic-change
-topic-comment	topic-comment
-topic-drift	topic change
-topic-question	topic-comment
-topic-solutionhood	topic-comment
-unconditional	condition
-unless	condition
-volitional-cause	cause
-volitional-result	cause
-causation	cause
-comparison.concession	contrast
-comparison.concession+speechact	comparison
-comparison.contrast	contrast
-comparison.degree	comparison
-comparison.similarity	comparison
-conditional	condition
-contingency.cause	condition
-contingency.cause+belief	condition
-contingency.cause+speechact	condition
-contingency.condition	condition
-contingency.condition+speechact	condition
-contingency.goal	condition
-contingency.negative-cause	cause
-contingency.negative-condition	condition
-contingency.purpose	enablement
-expansion	elaboration
-expansion.alternative	condition
-expansion.conjunction	joint
-expansion.correction	contrast
-expansion.disjunction	joint
-expansion.equivalence	comparison
-expansion.exception	contrast
-expansion.instantiation	elaboration
-expansion.level-of-detail	elaboration
-expansion.manner	manner-means
-expansion.restatement	summary
-expansion.substitution	contrast
-hypophora	topic-comment
-interrupted	topic-change
-progression	temporal
-repetition	elaboration
-temporal.asynchronous	temporal
-temporal.synchronous	temporal
-temporal.synchrony	temporal
-qap	topic-comment
-contingency.negative-condition+speechact	condition
-contingency.negative	condition
-expansion.genexpansion	elaboration
-expansion.level	elaboration
-qap.hypophora	topic-comment
-bg-compare	background
-bg-general	background
-bg-goal	background
-cause-result	cause
-elab-addition	elaboration
-elab-aspect	elaboration
-elab-definition	elaboration
-elab-enumember	elaboration
-elab-example	elaboration
-elab-process_step	elaboration
-exp-evidence	explanation
-exp-reason	explanation
-findings	cause
-acknowledgement	attribution
-alternation	condition
-clarification_question	topic-comment
-comment	evaluation
-continuation	joint
-correction	contrast
-explanation*	explanation
-flashback	explanation
-frame	explanation
-goal	enablement
-narration	elaboration
-parallel	joint
-q_elab	elaboration
-question_answer_pair	topic-comment
-temploc	temporal
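The two `substitions-classes-*.tsv` files deleted above map each label to a coarse class name only; they appear to differ from the corresponding `mappings-classes-*.tsv` files only in dropping the integer MAPPING column. A minimal sketch of applying such a substitution table to a list of gold labels follows; the helper name and the example labels are illustrative assumptions, not repository code.

```python
import csv

def load_substitutions(path):
    """Read a LABEL -> CLASS substitution table from a tab-separated file."""
    with open(path, newline='', encoding='utf-8') as f:
        reader = csv.DictReader(f, delimiter='\t')
        return {row['LABEL']: row['CLASS'] for row in reader}

subs = load_substitutions('mappings/substitions-classes-rst.tsv')

# Hypothetical gold labels, collapsed to their coarse classes before training.
gold = ['adversative-concession', 'purpose-goal', 'joint-sequence']
coarse = [subs.get(label, label) for label in gold]
print(coarse)  # ['contrast', 'enablement', 'temporal']
```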
diff --git a/train_classifiers.sh b/train_classifiers.sh
new file mode 100644
index 0000000000000000000000000000000000000000..324127d53a55d96661883c17b6069cdf79c72ae1
--- /dev/null
+++ b/train_classifiers.sh
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+
+# IMPORTANT: replace 'PATH_TO_REPO' below with the path to your cloned DISRPT repo!
+# This script trains all three classifiers (and the two adapters)
+# needed for this task.
+# The first classifier does not need the adapters, but the last two do.
+# If the adapters have not been trained yet, run all five commands below.
+# If the adapters are already trained,
+# you can run the adapter classifiers directly.
+
+# To reproduce the MELODI team's results, do NOT change any other parameters.
+
+# bare classifier
+python pytorch_classifier.py --num_epochs 5 --data_path 'PATH_TO_REPO'
+
+# Train the adapters:
+python make_adapter.py --num_epochs 15 --data_path 'PATH_TO_REPO' --freeze_layers 'layer.1;layer.2;layer.3'
+python make_adapter.py --num_epochs 15 --data_path 'PATH_TO_REPO' --freeze_layers 'layer.1'
+
+# Run classifiers with adapters
+python adapter_classifier.py --num_epochs 3 --data_path 'PATH_TO_REPO' --adapter_name 'adapter_15-epochs_frozen-1'
+python adapter_classifier.py --num_epochs 4 --data_path 'PATH_TO_REPO' --adapter_name 'adapter_15-epochs_frozen-1-2-3'
\ No newline at end of file
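The last two commands above assume the adapters saved by make_adapter.py can be loaded by name. A rough sketch of what loading a saved adapter into an mBERT model can look like with the adapter-transformers library is given below; the save directory is an assumption taken from the `--adapter_name` arguments above, not a verbatim excerpt from adapter_classifier.py.

```python
from transformers import AutoAdapterModel, AutoTokenizer

# Assumed location where make_adapter.py saved the trained adapter.
adapter_dir = './adapter_15-epochs_frozen-1'

tokenizer = AutoTokenizer.from_pretrained('bert-base-multilingual-cased')
model = AutoAdapterModel.from_pretrained('bert-base-multilingual-cased')

# Load the saved adapter and make it active for every forward pass.
adapter_name = model.load_adapter(adapter_dir)
model.set_active_adapters(adapter_name)
```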