train.py
import torch

from Linker import *
from utils import read_csv_pgbar
from find_config import configurate
from Configuration import Configuration

# Release any cached GPU memory before building the models
torch.cuda.empty_cache()

# Cap on the number of sentences read from the dataset (the large value effectively means "use the whole file")
nb_sentences = 1000000000
file_path_axiom_links = 'Datasets/goldANDsilver_dataset_links.csv'
model_tagger = "models/flaubert_super_98_V2_50e.pt"
# region config
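# Build/update the configuration for this dataset and tagger, then read back
# the parameters needed for training below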
configurate(file_path_axiom_links, model_tagger, nb_sentences=nb_sentences)
config = Configuration.read_config()
version = config["VERSION"]
datasetConfig = config["DATASET_PARAMS"]
modelEncoderConfig = config["MODEL_ENCODER"]
modelLinkerConfig = config["MODEL_LINKER"]
modelTrainingConfig = config["MODEL_TRAINING"]
epochs = int(modelTrainingConfig['epoch'])
batch_size = int(modelTrainingConfig['batch_size'])
# endregion
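
# Load the axiom-links dataset into a DataFrame (with a progress bar),
# reading at most nb_sentences rows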
df_axiom_links = read_csv_pgbar(file_path_axiom_links, nb_sentences)
print("#" * 20)
print("#" * 20)
print("Linker")
# Load the Linker with the trained tagger
linker = Linker(model_tagger)
print("\nLinker Training\n")
linker.train_linker(df_axiom_links, validation_rate=0.1, epochs=epochs, batch_size=batch_size,
                    checkpoint=True, tensorboard=True)
print("#" * 20)
print("#" * 20)