|
from transformers import pipeline, set_seed |
|
from flask import Flask, request, jsonify |
|
import random, re |
|
|
|
app = Flask(__name__)

# Cache of model name -> loaded transformers pipeline, so each model is
# downloaded/instantiated at most once per process (see get_pipeline).
model_cache = {}

# Seed ideas used when a request supplies no starting text; loaded once at
# import time. Explicit UTF-8 avoids the platform-dependent default encoding.
# NOTE(review): the service will fail to start if ideas.txt is missing —
# presumably intentional, since empty-text requests depend on it.
with open("ideas.txt", "r", encoding="utf-8") as f:
    lines = f.readlines()
|
|
|
def get_pipeline(model_name):
    """Return a text-generation pipeline for *model_name*, loading it lazily.

    The pipeline is built once per model and memoized in the module-level
    ``model_cache``; both supported models are GPT-2 based, hence the fixed
    ``gpt2`` tokenizer.
    """
    cached = model_cache.get(model_name)
    if cached is None:
        cached = pipeline('text-generation', model=model_name, tokenizer='gpt2')
        model_cache[model_name] = cached
    return cached
|
|
|
def generate_prompts(starting_text, model_name, num_prompts=1):
    """Generate up to ``num_prompts`` prompt strings with a GPT-2 pipeline.

    Parameters
    ----------
    starting_text : str
        Seed text for generation. When empty, a random line from the
        module-level ``lines`` (ideas.txt) is drawn for EACH prompt.
        (Bug fix: the previous version overwrote ``starting_text`` itself,
        so with ``num_prompts > 1`` every prompt after the first reused the
        same random idea instead of drawing a fresh one.)
    model_name : str
        Hugging Face model id passed to ``get_pipeline``.
    num_prompts : int
        Number of prompts to attempt; each gets up to 4 generation tries.

    Returns
    -------
    list[str]
        At most ``num_prompts`` cleaned generated texts; may be shorter if
        some prompts never pass the quality check within 4 tries.
    """
    response_list = []
    gpt2_pipe = get_pipeline(model_name)

    for _ in range(num_prompts):
        for _attempt in range(4):
            # Re-seed each attempt so retries produce different samples.
            set_seed(random.randint(100, 1000000))

            if starting_text == "":
                # Draw a fresh idea line per attempt; normalize case and
                # strip punctuation that tends to derail generation.
                prompt_text = lines[random.randrange(0, len(lines))].strip().lower().capitalize()
                prompt_text = re.sub(r"[,:\-–.!;?_]", '', prompt_text)
            else:
                prompt_text = starting_text

            response = gpt2_pipe(prompt_text, max_length=random.randint(60, 90), num_return_sequences=1)
            generated_text = response[0]['generated_text'].strip()

            # Accept only outputs that meaningfully extend the prompt.
            if generated_text != prompt_text and len(generated_text) > (len(prompt_text) + 4):
                # Drop dot-joined tokens (URL-like fragments) and angle brackets.
                cleaned_text = re.sub(r'[^ ]+\.[^ ]+', '', generated_text)
                cleaned_text = cleaned_text.replace("<", "").replace(">", "")
                response_list.append(cleaned_text)
                break

    return response_list[:num_prompts]
|
|
|
|
|
@app.route('/', methods=['GET'])
def generate_api():
    """GET / — generate prompts as a JSON array.

    Query parameters:
        text  -- optional seed text (default "": a random idea is used)
        n     -- number of prompts to generate (default 1)
        model -- "dall" selects the DALL-E model; anything else selects
                 the Stable Diffusion model (default "sd")
    """
    starting_text = request.args.get('text', default="", type=str)
    num_prompts = request.args.get('n', default=1, type=int)
    model_param = request.args.get('model', default="sd", type=str).lower()

    model_name = (
        "Gustavosta/MagicPrompt-Dalle"
        if model_param == "dall"
        else "Gustavosta/MagicPrompt-Stable-Diffusion"
    )

    return jsonify(generate_prompts(starting_text, model_name, num_prompts=num_prompts))
|
|
|
if __name__ == '__main__':
    # Development entry point: listen on all interfaces.
    # NOTE(review): port 7860 is presumably chosen for Hugging Face Spaces
    # compatibility — confirm before changing.
    app.run(host='0.0.0.0', port=7860)
|
|