import os

from huggingface_hub import login

from easyllm.clients import huggingface
from easyllm.prompt_utils import build_llama2_prompt

# Authenticate against the Hugging Face Hub with a token read from the environment.
TOKEN = os.environ.get("TOKEN")
login(token=TOKEN)

# Format chat messages using the Llama 2 prompt template.
huggingface.prompt_builder = build_llama2_prompt

system_message = """
You are a metadata schema translator. You translate metadata from one schema to another.
"""
def translate(schema_input, schema_target):
    """Translate a metadata file from its current schema into the target schema."""
    prompt = '"""{} \n Translate the schema metadata file above to the schema: {}"""'.format(schema_input, schema_target)
    response = huggingface.ChatCompletion.create(
        model="mistralai/Mistral-7B-v0.1",
        messages=[
            {"role": "system", "content": system_message},
            {"role": "user", "content": prompt},
        ],
        temperature=0.9,
        top_p=0.6,
        max_tokens=256,
    )
    print(response)
    return response['choices'][0]['message']['content']
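

# Example usage, a minimal sketch: the input metadata string and target schema
# name below are hypothetical placeholders, not part of the original script.
if __name__ == "__main__":
    example_metadata = '{"titles": [{"title": "Example Dataset"}], "publicationYear": "2023"}'
    translated = translate(example_metadata, "Dublin Core")
    print(translated)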