Skip to content
Snippets Groups Projects
Commit c0cb9c69 authored by Julien Breton's avatar Julien Breton
Browse files

fine tuning

parent 27967a29
Branches
No related tags found
No related merge requests found
# libraries
import json
import numpy as np
import matplotlib.pyplot as plt
# NOTE(review): this span is a commit DIFF rendered flat — both the old
# (paths-based) and new (config-based) versions of build_grouped_barchart are
# interleaved below, and part of the body is elided at the "@@" hunk marker.
# It is NOT runnable as-is; the post-commit version is the one taking `config`.
# old signature: took a flat list of result-file paths
def build_grouped_barchart(paths, title, output_path):
# old: bar data hard-coded to exactly three models
bars = [[], [], []]
# new signature: `config` is a list of dicts, each with keys 'ref' (results
# JSON path), 'title' (legend label) and 'color' (bar colour)
def build_grouped_barchart(config, title, output_path):
# new: one sub-list of category counts per config entry, built dynamically
bars = []
# old loop header (over paths)
for i in range(len(paths)):
# new loop header (over config)
for i in range(len(config)):
bars.append([])
for i in range(len(config)):
# per-model counters for the four breakdown categories
sum_perfect_equals = 0
sum_subpart = 0
sum_miss_classification = 0
sum_hallucination = 0
# old: file path came straight from the paths list
with open(paths[i]) as file:
# new: file path comes from the config entry's 'ref' key
with open(config[i]['ref']) as file:
data = json.load(file)
# only the "break_down" section of the results JSON is used
data = data["break_down"]
for tag, values in data.items():
......@@ -27,40 +29,48 @@ def build_grouped_barchart(paths, title, output_path):
# NOTE(review): the loop body that accumulates the four counters is elided by
# the diff hunk above — presumably it sums per-tag counts from `values`; the
# exact keys cannot be confirmed from this view.
bars[i].append(sum_miss_classification)
bars[i].append(sum_hallucination)
# set width of bars
barWidth = 0.25
# new: x positions built incrementally, one offset row per config entry
r_list = [np.arange(len(bars[0]))]
# Set position of bar on X axis
# old: exactly three hard-coded position rows
r1 = np.arange(len(bars[0]))
r2 = [x + barWidth for x in r1]
r3 = [x + barWidth for x in r2]
# new: each subsequent row is the previous row shifted right by barWidth
for i in range(1, len(config)):
r_list.append([x + barWidth for x in r_list[i-1]])
# Make the plot
# old: three hard-coded bar series with fixed labels/colours
plt.bar(r1, bars[0], color='#75ac9d', width=barWidth, edgecolor='white', label='GPT-4')
plt.bar(r2, bars[1], color='#fca301', width=barWidth, edgecolor='white', label='Mixtral-8x7b')
plt.bar(r3, bars[2], color='#5619d8', width=barWidth, edgecolor='white', label='Mistral-7b')
# new: one bar series per config entry, colour/label taken from the config
for i in range(len(config)):
plt.bar(r_list[i], bars[i], color=config[i]['color'], width=barWidth, edgecolor='white', label=config[i]['title'])
# Add xticks on the middle of the group bars
plt.ylabel('Number of predicate')
plt.xlabel(title, fontweight='bold')
# old single-line xticks call
plt.xticks([r + barWidth for r in range(len(bars[0]))], ['Perfect equals', 'Subpart', 'Miss classification', 'Others'])
# new: same call, reformatted across two lines
plt.xticks([r + barWidth for r in range(len(bars[0]))],
['Perfect equals', 'Subpart', 'Miss classification', 'Others'])
# Create legend & Show graphic
plt.legend()
# chart is written to disk, never shown interactively
plt.savefig(output_path)
# Driver section (post-commit version).
#
# The diff residue here previously left two conflicting sets of assignments:
# stale `title`/`output_path`/`bars*_path` variables plus a pre-commit call
# `build_grouped_barchart(paths, ...)` that passed a plain list of paths to a
# function now expecting a list of config dicts. Only the new, config-based
# invocation is kept.
#
# Each config entry supplies:
#   'ref'   - path to the results JSON produced by the evaluation run
#   'title' - legend label for the model/setting
#   'color' - bar colour (hex)
config = [
    {
        'ref': '../../results/LLM/GPT-4/GPT-4_zero_shot_results.json',
        'title': 'GPT-4 zero shot',
        'color': '#436850'
    },
    {
        'ref': '../../results/LLM/Mistral-7b/MISTRAL_zero_shot_results.json',
        'title': 'Mistral-7b zero shot',
        'color': '#5619d8'
    },
    {
        'ref': '../../results/LLM/Mistral-7b/MISTRAL_fine_tuned_results.json',
        'title': 'Mistral-7b fine tuned',
        'color': '#6895D2'
    }
]

title = "Evolution from Zero-shot to Fine-tuning predicate extraction with Mistral"
output_path = "../../results/LLM/GPT-zs_MIS-zs_MIS-ft.png"

build_grouped_barchart(config, title, output_path)
......@@ -57,7 +57,7 @@ def fine_tune(base_model, new_model):
# Hyperparameters should be adjusted based on the hardware you are using
training_arguments = TrainingArguments(
output_dir= "./results",
num_train_epochs= 1,
num_train_epochs= 6,
per_device_train_batch_size= 8,
gradient_accumulation_steps= 2,
optim = "paged_adamw_8bit",
......
results/LLM/GPT-zs_MIS-zs_MIS-ft.png

28.9 KiB

0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment