from transformers import pipeline, set_seed
from flask import Flask, request, jsonify
import random, re
app = Flask(__name__)
# Cache of loaded pipelines, keyed by model name
model_cache = {}

# Load candidate starting texts
with open("ideas.txt", "r") as f:
    lines = f.readlines()
def get_pipeline(model_name):
    # Lazily create and cache a text-generation pipeline for the requested model
    if model_name not in model_cache:
        model_cache[model_name] = pipeline('text-generation', model=model_name, tokenizer='gpt2')
    return model_cache[model_name]
def generate_prompts(starting_text, model_name, num_prompts=1):
    response_list = []
    gpt2_pipe = get_pipeline(model_name)
    for _ in range(num_prompts):
        for _ in range(4):  # up to 4 attempts per prompt
            seed = random.randint(100, 1000000)
            set_seed(seed)
            # If the input text is empty, pick a random line from ideas.txt
            if starting_text == "":
                starting_text = lines[random.randrange(0, len(lines))].strip().lower().capitalize()
                starting_text = re.sub(r"[,:\-–.!;?_]", '', starting_text)
            # Generate text
            response = gpt2_pipe(starting_text, max_length=random.randint(60, 90), num_return_sequences=1)
            generated_text = response[0]['generated_text'].strip()
            # Keep the result only if it meaningfully extends the input, then clean it up
            if generated_text != starting_text and len(generated_text) > (len(starting_text) + 4):
                cleaned_text = re.sub(r'[^ ]+\.[^ ]+', '', generated_text)
                cleaned_text = cleaned_text.replace("<", "").replace(">", "")
                response_list.append(cleaned_text)
                break
    return response_list[:num_prompts]
# API endpoint
@app.route('/', methods=['GET'])
def generate_api():
    starting_text = request.args.get('text', default="", type=str)
    num_prompts = request.args.get('n', default=1, type=int)
    model_param = request.args.get('model', default="sd", type=str).lower()
    # Model selection
    if model_param == "dall":
        model_name = "Gustavosta/MagicPrompt-Dalle"
    else:
        model_name = "Gustavosta/MagicPrompt-Stable-Diffusion"
    # Generate the prompts
    results = generate_prompts(starting_text, model_name, num_prompts=num_prompts)
    return jsonify(results)
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=7860)
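
# Example usage (a minimal sketch, assuming the `requests` package is installed and the
# server is reachable at http://localhost:7860, matching the host/port configured above):
#
#   import requests
#   # One Stable Diffusion prompt, seeded from a random line in ideas.txt
#   print(requests.get("http://localhost:7860/", params={"n": 1}).json())
#   # Three DALL-E style prompts continuing the text "a castle"
#   print(requests.get("http://localhost:7860/",
#                      params={"text": "a castle", "n": 3, "model": "dall"}).json())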