Update app.py via AI Editor
app.py
CHANGED
@@ -10,15 +10,17 @@ from dash import html, dcc, Input, Output, State, callback_context, MATCH, ALL
 from docx.shared import Pt
 from docx.enum.style import WD_STYLE_TYPE
 from PyPDF2 import PdfReader
-import openai
 import logging
 import threading
+import requests
+import json
 
 logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)s %(message)s')
 
 app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP], suppress_callback_exceptions=True)
 
-
+ANTHROPIC_API_URL = "https://api.anthropic.com/v1/messages"
+ANTHROPIC_API_KEY = os.getenv("ANTHROPIC_API_KEY", "YOUR_ANTHROPIC_API_KEY")
 
 uploaded_files = {}
 current_document = None
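The new configuration reads ANTHROPIC_API_KEY from the environment and falls back to a placeholder string. A minimal sketch of a startup guard under that assumption; the check itself is not part of this change.

import os
import logging

ANTHROPIC_API_KEY = os.getenv("ANTHROPIC_API_KEY", "YOUR_ANTHROPIC_API_KEY")

# Hypothetical guard, not in the diff: the placeholder key will be rejected by
# the API with an authentication error, so warn before the first request.
if ANTHROPIC_API_KEY == "YOUR_ANTHROPIC_API_KEY":
    logging.warning("ANTHROPIC_API_KEY is not set; Anthropic requests will fail.")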
@@ -242,6 +244,42 @@ def update_right_col(n_clicks_list, btn_ids):
         selected_type = "Shred"
     return get_right_col_content(selected_type)
 
+def anthropic_generate(prompt, model="claude-3-opus-20240229", max_tokens=4096, temperature=0.25):
+    headers = {
+        "x-api-key": ANTHROPIC_API_KEY,
+        "anthropic-version": "2023-06-01",
+        "content-type": "application/json"
+    }
+    data = {
+        "model": model,
+        "max_tokens": max_tokens,
+        "temperature": temperature,
+        "messages": [
+            {"role": "user", "content": prompt},
+        ],
+    }
+    try:
+        response = requests.post(ANTHROPIC_API_URL, headers=headers, data=json.dumps(data), timeout=120)
+        response.raise_for_status()
+        result = response.json()
+        if "content" in result and isinstance(result["content"], list):
+            content = ""
+            for item in result["content"]:
+                if isinstance(item, dict) and "text" in item:
+                    content += item["text"]
+                elif isinstance(item, str):
+                    content += item
+            return content
+        elif "content" in result and isinstance(result["content"], str):
+            return result["content"]
+        elif "stop_reason" in result and "output" in result:
+            return result["output"]
+        else:
+            raise Exception(f"Anthropic API unexpected response: {result}")
+    except Exception as e:
+        logging.error(f"Anthropic API error: {str(e)}")
+        raise
+
 def generate_document(document_type, file_contents):
     prompt = f"""Generate a {document_type} based on the following project artifacts:
 {' '.join(file_contents)}
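The new anthropic_generate helper posts to the Messages endpoint with requests and normalizes the response shape by hand. For comparison, a minimal sketch of the same call through the official anthropic Python SDK, which this change does not use (assumes pip install anthropic and the same environment variable):

# Sketch only: equivalent request via the official `anthropic` SDK instead of
# requests + json. Not part of the diff.
import os
import anthropic

client = anthropic.Anthropic(api_key=os.getenv("ANTHROPIC_API_KEY"))

def anthropic_generate_sdk(prompt, model="claude-3-opus-20240229", max_tokens=4096, temperature=0.25):
    message = client.messages.create(
        model=model,
        max_tokens=max_tokens,
        temperature=temperature,
        messages=[{"role": "user", "content": prompt}],
    )
    # The Messages API returns a list of content blocks; concatenate the text blocks.
    return "".join(block.text for block in message.content if block.type == "text")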
@@ -273,19 +311,9 @@ like maybe, could be, should, possible be definitive in your language and confid
 to be sure we address them.
 Now, generate the {document_type}:
 """
-
     logging.info(f"Generating document for type: {document_type}")
     try:
-        response = openai.ChatCompletion.create(
-            model="gpt-4-1106-preview",
-            messages=[
-                {"role": "system", "content": "You are a helpful, expert government proposal writer."},
-                {"role": "user", "content": prompt}
-            ],
-            max_tokens=4096,
-            temperature=0.25,
-        )
-        generated_text = response['choices'][0]['message']['content']
+        generated_text = anthropic_generate(prompt)
         logging.info("Document generated successfully.")
         return generated_text
     except Exception as e:
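One behavioral difference: the removed OpenAI call sent a system message ("You are a helpful, expert government proposal writer.") that anthropic_generate does not reproduce. The Messages API accepts a system prompt as a top-level field rather than a message role, so carrying it forward could look roughly like the sketch below; the helper name and parameter are hypothetical, not part of this change.

import json

def build_anthropic_payload(prompt, system=None, model="claude-3-opus-20240229",
                            max_tokens=4096, temperature=0.25):
    # Hypothetical helper, not in the diff: build the Messages API body and
    # optionally keep the old system prompt via the top-level "system" field.
    data = {
        "model": model,
        "max_tokens": max_tokens,
        "temperature": temperature,
        "messages": [{"role": "user", "content": prompt}],
    }
    if system:
        data["system"] = system  # e.g. "You are a helpful, expert government proposal writer."
    return json.dumps(data)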
@@ -419,16 +447,8 @@ Now, provide the updated {document_type}:
 
     logging.info(f"Updating document via chat for {document_type} instruction: {chat_input}")
     try:
-        response = openai.ChatCompletion.create(
-            model="gpt-4-1106-preview",
-            messages=[
-                {"role": "system", "content": "You are a helpful, expert government proposal writer."},
-                {"role": "user", "content": prompt}
-            ],
-            max_tokens=4096,
-            temperature=0.2,
-        )
-        current_document = response['choices'][0]['message']['content']
+        updated_content = anthropic_generate(prompt, temperature=0.2)
+        current_document = updated_content
         logging.info("Document updated via chat successfully.")
         return f"Document updated based on: {chat_input}", dcc.Markdown(current_document)
     except Exception as e:
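Both generate_document and the chat-update callback surface failures through their except branches, but rate-limit (429) and overload (529) responses are often worth retrying before giving up. A minimal sketch of wrapping the requests.post call, assuming the raw-requests approach used in this change; the helper name and retry policy are illustrative only.

import time
import requests

def post_with_retries(url, headers, payload, attempts=3, timeout=120):
    # Hypothetical helper, not part of the diff: retry transient Anthropic API
    # failures (rate limits and overloads) with simple exponential backoff.
    for attempt in range(attempts):
        response = requests.post(url, headers=headers, data=payload, timeout=timeout)
        if response.status_code in (429, 500, 529) and attempt < attempts - 1:
            time.sleep(2 ** attempt)
            continue
        response.raise_for_status()
        return response.json()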