import gradio as gr
from transformers import AutoTokenizer, BartForConditionalGeneration

# Load the BART summarization model and its tokenizer
model = BartForConditionalGeneration.from_pretrained("facebook/bart-large-cnn")
tokenizer = AutoTokenizer.from_pretrained("facebook/bart-large-cnn")

def generate_summary(file):
    # Rewind the file pointer to the start of the uploaded file
    file.seek(0)
    # Read the uploaded text file (decode if it was opened in binary mode)
    text_content = file.read()
    if isinstance(text_content, bytes):
        text_content = text_content.decode("utf-8")
    # Tokenize the text, then generate and decode the summary
    inputs = tokenizer(text_content, return_tensors="pt", max_length=1024, truncation=True)
    summary_ids = model.generate(inputs["input_ids"], max_length=150, min_length=50, length_penalty=2.0, num_beams=4, early_stopping=True)
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summary

demo = gr.Interface(
    fn=generate_summary,
    inputs=gr.File(),
    outputs="text",
    live=False
)

# Launch the app
demo.launch(share=True)
# Alternative configuration: Chinese BART model loaded from a local directory.
# Note that this block only runs after the demo.launch() call above returns
# (i.e. once the first server is shut down).
model = BartForConditionalGeneration.from_pretrained("models/fnlp/bart-base-chinese")
tokenizer = AutoTokenizer.from_pretrained("models/fnlp/bart-base-chinese")

def generate_summary(file):
    # Rewind the file pointer to the start of the uploaded file
    file.seek(0)
    # Read the uploaded text file (decode if it was opened in binary mode)
    text_content = file.read()
    if isinstance(text_content, bytes):
        text_content = text_content.decode("utf-8")
    # Tokenize the text, then generate and decode the summary
    inputs = tokenizer(text_content, return_tensors="pt", max_length=512, truncation=True)
    summary_ids = model.generate(inputs["input_ids"], max_length=150, min_length=50, length_penalty=2.0, num_beams=4, early_stopping=True)
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summary

demo = gr.Interface(
    fn=generate_summary,
    inputs=gr.File(),
    outputs="text",
    live=False
)

# Launch the app
demo.launch(share=True)
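# -----------------------------------------------------------------------
# Optional sanity check (a minimal sketch, not part of the original app):
# it exercises the same tokenize -> generate -> decode path as the Gradio
# callback, but with a hard-coded string instead of an uploaded file.
# Run it in a separate Python session or before demo.launch(); SAMPLE_TEXT
# is a hypothetical placeholder.
#
# SAMPLE_TEXT = "..."  # paste a paragraph of text here
# inputs = tokenizer(SAMPLE_TEXT, return_tensors="pt", max_length=512, truncation=True)
# ids = model.generate(inputs["input_ids"], max_length=150, min_length=50,
#                      length_penalty=2.0, num_beams=4, early_stopping=True)
# print(tokenizer.decode(ids[0], skip_special_tokens=True))
# -----------------------------------------------------------------------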