import numpy as np
import pandas as pd
import requests
import os
import gradio as gr
import json
from dotenv import load_dotenv, find_dotenv
_ = load_dotenv(find_dotenv())
databricks_token = os.getenv('DATABRICKS_TOKEN')
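# The token is expected to come from a local .env file loaded above; an
# illustrative entry (placeholder value, not a real token):
#   DATABRICKS_TOKEN=<your-databricks-personal-access-token>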
model_uri = "https://dbc-eb788f31-6c73.cloud.databricks.com/serving-endpoints/Mpt-7b-tester/invocations"
def score_model(model_uri, databricks_token, data):
    """Send a single prompt row to the Databricks model serving endpoint."""
    headers = {
        "Authorization": f"Bearer {databricks_token}",
        "Content-Type": "application/json",
    }
    # Build the payload as a dict and let requests serialize it, so quotes and
    # newlines in the prompt are escaped correctly (string concatenation would
    # produce invalid JSON for such prompts).
    payload = {
        "dataframe_split": {
            "index": [0],
            "columns": ["prompt", "temperature", "max_tokens"],
            "data": [[data, 0.5, 500]],
        }
    }
    print("***payload: ")
    print(json.dumps(payload))
    response = requests.post(model_uri, headers=headers, json=payload)
    if response.status_code != 200:
        raise Exception(f"Request failed with status {response.status_code}, {response.text}")
    return response.json()
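# For reference, the serialized request body produced above looks roughly like
# this (illustrative prompt text; the real prompt is built in greet() below):
#   {"dataframe_split": {"index": [0],
#                        "columns": ["prompt", "temperature", "max_tokens"],
#                        "data": [["Analyze this elevator pitch ...", 0.5, 500]]}}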
def get_completion(prompt):
    return score_model(model_uri, databricks_token, prompt)
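# Note: MLflow-style serving endpoints typically return a JSON object such as
# {"predictions": [...]}, but the exact shape depends on the model signature,
# so greet() below simply forwards whatever JSON the endpoint returns.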
def greet(text):
    # Ask the model to analyze the pitch and answer as the target customer.
    prompt = f"""
Determine the product or solution, the problem being solved, the features, and the target customer discussed in the \
following text, which is delimited by triple quotes. Then, pretend that you are the target customer. \
State whether you would use this product and elaborate on why. Also state whether you would pay for it and elaborate on why. \
Format your response as a JSON object with \
'solution', 'problem', 'features', 'target_customer', 'fg_will_use', 'reason_to_use', 'fg_will_pay', 'reason_to_pay' as the keys. \
Text sample: '''{text}'''
"""
    response = get_completion(prompt)
    return json.dumps(response)
#iface = gr.Interface(fn=greet, inputs="text", outputs="text")
#iface.launch()
#iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="Text to find entities", lines=2)], outputs=[gr.HighlightedText(label="Text with entities")], title="NER with dslim/bert-base-NER", description="Find entities using the `dslim/bert-base-NER` model under the hood!", allow_flagging="never", examples=["My name is Andrew and I live in California", "My name is Poli and work at HuggingFace"])
iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="Elevator pitch", lines=3)], outputs="json")
iface.launch()
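# When running in a hosted notebook, the demo can also be launched with
# iface.launch(share=True) to get a temporary public URL.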