simple complet
metadata_transformer.py  +6 -10  CHANGED
@@ -9,22 +9,18 @@ login(token=TOKEN)
 
 huggingface.prompt_builder = build_llama2_prompt
 
-system_message = """
-You are a metadata schema translator. You translate metadata from one schema to another.
-"""
+# system_message = """
+# You are a metadata schema translator. You translate metadata from one schema to another.
+# """
 
 def translate(schema_input, schema_target):
     prompt = '"""{} \n Translate the schema metadata file above to the schema: {}"""'.format(schema_input, schema_target)
-    response = huggingface.ChatCompletion.create(
+    response = huggingface.Completion.create(
         model="meta-llama/Llama-2-7b-hf",
-        messages=[
-            {"role": "system", "content": system_message},
-            {"role": "user", "content": prompt},
-        ],
-        # prompt=prompt,
+        prompt=prompt,
         temperature=0.9,
         top_p=0.6,
         max_tokens=256,
     )
-
+
     return response['choices'][0]['message']['content']
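For reference, a minimal usage sketch of the updated translate function. This is only an illustration: the metadata record and the target schema name are invented, and it assumes the file above is importable as metadata_transformer with a valid Hugging Face token already passed to login().

# Hypothetical usage of translate() after this change; the record and target
# schema name are made up for illustration.
from metadata_transformer import translate

dublin_core_record = """
<dc:title>Ocean temperature measurements</dc:title>
<dc:creator>Jane Doe</dc:creator>
<dc:date>2023-07-14</dc:date>
"""

# translate() formats the record into the prompt template, sends the raw prompt
# (no system message) to meta-llama/Llama-2-7b-hf, and returns the model's answer.
result = translate(dublin_core_record, "schema.org")
print(result)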