|
import os
import subprocess
import sys

# Best-effort upgrade of huggingface_hub at startup (needed to pull the model
# files in hosted demo environments).  `python -m pip` via sys.executable is
# the documented way to invoke pip for *this* interpreter; an argv list with
# the default shell=False avoids going through a shell at all.  Like the
# original os.system call, a failed install is tolerated (no check=True) so
# an already-provisioned environment still starts.
subprocess.run(
    [sys.executable, "-m", "pip", "install", "-U", "huggingface_hub"],
)
|
|
|
from haystack.nodes import FARMReader |
|
|
|
|
|
# Directory holding the saved reader checkpoint (name suggests an en->zh
# question reader trained for 2 epochs on SPO data — TODO confirm provenance).
question_reader_save_path = "en_zh_question_reader_save_epc_2_spo"

# Fail fast with an explicit exception rather than `assert`, which is
# silently stripped when Python runs with -O.
if not os.path.exists(question_reader_save_path):
    raise FileNotFoundError(
        f"Reader checkpoint directory not found: {question_reader_save_path!r}"
    )

# CPU-only inference; num_processes=0 disables FARM's multiprocessing,
# presumably to avoid fork issues in hosted demos — verify if changed.
en_zh_reader = FARMReader(
    model_name_or_path=question_reader_save_path,
    use_gpu=False,
    num_processes=0,
)
|
|
|
def output_to_dict(output, trans_keys=("answers",)):
    """Make a reader prediction JSON-serializable.

    For every key listed in *trans_keys*, each element of the corresponding
    list is converted via its ``to_dict()`` method (Haystack ``Answer``
    objects expose this); every other key/value pair is passed through
    unchanged.

    The default is a tuple rather than the original mutable ``["answers"]``
    list — a list default is shared across calls (classic Python pitfall),
    and membership tests work identically on a tuple.

    :param output: mapping (e.g. ``FARMReader.predict_on_texts`` result).
    :param trans_keys: keys whose list values need ``to_dict()`` conversion.
    :return: a plain dict, converted keys first (as in the original).
    """
    converted = {
        key: [item.to_dict() for item in value]
        for key, value in output.items()
        if key in trans_keys
    }
    passthrough = {
        key: value for key, value in output.items() if key not in trans_keys
    }
    return {**converted, **passthrough}
|
|
|
# Reference snippet of direct reader usage, kept as real comments instead of
# a no-op module-level string literal (the original triple-quoted block was
# still compiled into the module as dead code):
#
#   en_zh_reader.predict_on_texts(
#       question=x.replace("hp:", ""),
#       texts=[zh_question],
#   )
|
|
|
import gradio as gr |
|
|
|
# (English span, Chinese sentence) pairs shown as clickable examples in the
# Gradio UI.  The Chinese sentences translate roughly to
# "Which province is Ningbo in?" and "What is the currency of the USA?".
example_sample = [

["NingBo" ,"宁波在哪个省份?"],

["province" ,"宁波在哪个省份?"],

["currency" ,"美国的通货是什么?"],

]
|
|
|
def demo_func(eng_span, zh_sentence):
    """Gradio callback: extract the Chinese span best matching an English query.

    Runs the cross-lingual reader with *eng_span* as the question over the
    single context *zh_sentence*, then converts the prediction to a plain,
    JSON-serializable dict for the ``"json"`` output component.

    :param eng_span: English query/span text.
    :param zh_sentence: Chinese sentence to extract from.
    :return: JSON-serializable prediction dict.
    :raises TypeError: if either argument is not a string.
    """
    # Explicit isinstance checks replace `assert type(x) == type("")`:
    # asserts are stripped under -O, and isinstance is the idiomatic test.
    if not isinstance(eng_span, str):
        raise TypeError(f"eng_span must be str, got {type(eng_span).__name__}")
    if not isinstance(zh_sentence, str):
        raise TypeError(f"zh_sentence must be str, got {type(zh_sentence).__name__}")

    output = en_zh_reader.predict_on_texts(
        question=eng_span,
        texts=[zh_sentence],
    )
    return output_to_dict(output)
|
|
|
# Wire up the Gradio UI: two free-text inputs feeding demo_func, with the
# reader's prediction rendered by the built-in JSON viewer.
span_and_sentence_inputs = [
    gr.Text(label="English Span"),
    gr.Text(label="Chinese Sentence"),
]

demo = gr.Interface(
    fn=demo_func,
    inputs=span_and_sentence_inputs,
    outputs="json",
    title="Extract Similar Chinese Span by English From Chinese 🍔 demonstration",
    examples=example_sample if example_sample else None,
    cache_examples=False,
)

# None for host/port lets Gradio pick its defaults (127.0.0.1:7860).
demo.launch(server_name=None, server_port=None)
|
|