import os

from huggingface_hub import login

from easyllm.clients import huggingface
from easyllm.prompt_utils import build_llama2_prompt

# Authenticate against the Hugging Face Hub with a token from the environment.
TOKEN = os.environ.get("TOKEN")
login(token=TOKEN)

# Have easyllm format chat messages with the Llama 2 prompt template.
huggingface.prompt_builder = build_llama2_prompt

system_message = """
You are a metadata schema translator. You translate metadata from one schema to another.
"""


def translate(schema_input, schema_target):
    # Wrap the source metadata and the target schema name into a single user prompt.
    prompt = '"""{}\nTranslate the schema metadata file above to the schema: {}"""'.format(
        schema_input, schema_target
    )
    response = huggingface.ChatCompletion.create(
        model="mistralai/Mistral-7B-v0.1",
        messages=[
            {"role": "system", "content": system_message},
            {"role": "user", "content": prompt},
        ],
        temperature=0.9,
        top_p=0.6,
        max_tokens=256,
    )
    print(response)
    # The response follows the OpenAI-style schema; the generated text
    # lives in the first choice's message content.
    return response["choices"][0]["message"]["content"]
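

# Usage example (a minimal sketch: the Dublin Core-style input below and the
# "schema.org" target are illustrative placeholders, not part of the original script).
if __name__ == "__main__":
    example_metadata = "\n".join(
        [
            "title: Example Dataset",
            "creator: Jane Doe",
            "date: 2023-01-01",
        ]
    )
    print(translate(example_metadata, "schema.org"))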