Skip to content
Snippets Groups Projects
Commit bbc8b7f6 authored by Julien Breton's avatar Julien Breton
Browse files

fix imports

parent 77502e59
Branches
No related tags found
No related merge requests found
import json import json
from alive_progress import alive_bar from alive_progress import alive_bar
from openai import OpenAI from openai import OpenAI
import sys import importlib.util
sys.path.append('../')
from utils import get_pre_prompt
spec = importlib.util.spec_from_file_location("utils", "../utils.py")
utils = importlib.util.module_from_spec(spec)
spec.loader.exec_module(utils)
client = OpenAI() client = OpenAI()
...@@ -19,7 +20,7 @@ with alive_bar(len(loaded)) as bar: ...@@ -19,7 +20,7 @@ with alive_bar(len(loaded)) as bar:
completion = client.chat.completions.create( completion = client.chat.completions.create(
model="gpt-4", model="gpt-4",
messages=[ messages=[
{"role": "system", "content": get_pre_prompt()}, {"role": "system", "content": utils.get_pre_prompt()},
{"role": "user", "content": sentence} {"role": "user", "content": sentence}
] ]
) )
...@@ -51,4 +52,4 @@ with alive_bar(len(loaded)) as bar: ...@@ -51,4 +52,4 @@ with alive_bar(len(loaded)) as bar:
bar() bar()
with open('../../../results/GPT-4/GPT-4_zero_shot_answers.json', 'w', encoding='utf-8') as file: with open('../../../results/GPT-4/GPT-4_zero_shot_answers.json', 'w', encoding='utf-8') as file:
json.dump(output, file) # in 44:36.6 (0.08/s) json.dump(output, file) # in 44:36.6 (0.08/s)
import importlib
import json import json
import sys import sys
from alive_progress import alive_bar from alive_progress import alive_bar
import torch import torch
import transformers import transformers
import bitsandbytes, flash_attn import bitsandbytes, flash_attn
sys.path.append('../') spec = importlib.util.spec_from_file_location("utils", "../utils.py")
from utils import get_pre_prompt utils = importlib.util.module_from_spec(spec)
spec.loader.exec_module(utils)
model_id = "../../models/Mistral-7B-Instruct-v0.2" model_id = "../../models/Mistral-7B-Instruct-v0.2"
...@@ -47,7 +49,7 @@ output = {} ...@@ -47,7 +49,7 @@ output = {}
with alive_bar(len(loaded)) as bar: with alive_bar(len(loaded)) as bar:
for sentence in loaded: for sentence in loaded:
input.append(instruction_format(get_pre_prompt(), sentence)) input.append(instruction_format(utils.get_pre_prompt(), sentence))
bar() bar()
print("Input creation finished") print("Input creation finished")
......
import importlib
import json import json
import sys
from alive_progress import alive_bar from alive_progress import alive_bar
import torch import torch
import transformers import transformers
import bitsandbytes, flash_attn import bitsandbytes, flash_attn
sys.path.append('../')
from utils import get_pre_prompt spec = importlib.util.spec_from_file_location("utils", "../utils.py")
utils = importlib.util.module_from_spec(spec)
spec.loader.exec_module(utils)
model_id = "../../models/Mixtral-8x7B-Instruct-v0.1" model_id = "../../models/Mixtral-8x7B-Instruct-v0.1"
...@@ -48,7 +50,7 @@ output = {} ...@@ -48,7 +50,7 @@ output = {}
with alive_bar(len(loaded)) as bar: with alive_bar(len(loaded)) as bar:
for sentence in loaded: for sentence in loaded:
input.append(instruction_format(get_pre_prompt(), sentence)) input.append(instruction_format(utils.get_pre_prompt(), sentence))
bar() bar()
print("Input creation finished") print("Input creation finished")
......
0% Loading or loading failed.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment