Update app.py via AI Editor
app.py CHANGED
@@ -12,16 +12,17 @@ from PyPDF2 import PdfReader
 import logging
 import threading
 import re
-import
+import anthropic
 import sqlite3
 import time
 
 logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)s %(message)s')
 
-
-
-
-
+ANTHROPIC_KEY = os.environ.get("ANTHROPIC_API_KEY", "")
+anthropic_client = anthropic.Anthropic(api_key=ANTHROPIC_KEY)
+CLAUDE3_SONNET_MODEL = "claude-3-7-sonnet-20250219"
+CLAUDE3_MAX_CONTEXT_TOKENS = 200_000
+CLAUDE3_MAX_OUTPUT_TOKENS = 64_000
 
 DB_PATH = "maiko_dash_docs.sqlite"
 
@@ -646,31 +647,30 @@ def unified_handler(contents_lists, n_clicks_generate_list, n_clicks_download, s
         else:
             context += f"\n\n{inputs.get('shred','')}"
         full_prompt = f"{prompt}\n\n{context}"
-        logging.info(f"Prompt sent to
+        logging.info(f"Prompt sent to Claude 3 (first 1000 chars): {full_prompt[:1000]}")
         result_holder = {}
 
-        def
+        def anthropic_thread():
             try:
-
-                    model=
+                message = anthropic_client.messages.create(
+                    model=CLAUDE3_SONNET_MODEL,
+                    max_tokens=CLAUDE3_MAX_OUTPUT_TOKENS,
+                    system="You are a helpful assistant for proposal writing. Always output in markdown, and use markdown tables for spreadsheets if needed.",
                     messages=[
-                        {"role": "system", "content": "You are a helpful assistant for proposal writing. Always output in markdown, and use markdown tables for spreadsheets if needed."},
                         {"role": "user", "content": full_prompt}
-                    ]
-                    max_tokens=OPENAI_MAX_TOKENS,
-                    temperature=0.3,
+                    ]
                 )
-                result_text =
+                result_text = message.content[0].text if hasattr(message, "content") and message.content else str(message)
                 result_holder['result'] = result_text
-                logging.info("
+                logging.info("Claude 3 document generated successfully.")
             except Exception as e:
-                logging.error(f"Error with
+                logging.error(f"Error with Claude 3 API: {str(e)}")
                 result_holder['result'] = f"Error generating document: {str(e)}"
-        t = threading.Thread(target=
+        t = threading.Thread(target=anthropic_thread)
         t.start()
         t.join(timeout=120)
         generated_doc = result_holder.get('result', 'Error: No document generated.')
-        logging.info(f"
+        logging.info(f"Claude 3 API returned: {generated_doc[:500]}")
 
         set_generated_doc(selected_type, generated_doc)
         # Preview with fallback
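For context, the change replaces the previous OpenAI completion call with Anthropic's Messages API while keeping the existing thread-plus-timeout guard around the request. Below is a minimal, self-contained sketch of that pattern, assuming the anthropic Python SDK is installed and ANTHROPIC_API_KEY is set in the environment. The constants mirror the ones added at the top of app.py; the generate_document helper, the example prompt, and the daemon=True flag are illustrative and not part of the app itself.

# Sketch of the pattern introduced by this change: the blocking Anthropic call
# runs in a worker thread and the caller waits with a timeout, so a slow or hung
# request cannot stall the Dash callback indefinitely.
import logging
import os
import threading

import anthropic

logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")

# Mirrors the configuration added at the top of app.py in this commit.
ANTHROPIC_KEY = os.environ.get("ANTHROPIC_API_KEY", "")
anthropic_client = anthropic.Anthropic(api_key=ANTHROPIC_KEY)
CLAUDE3_SONNET_MODEL = "claude-3-7-sonnet-20250219"
CLAUDE3_MAX_OUTPUT_TOKENS = 64_000


def generate_document(full_prompt: str, timeout: int = 120) -> str:
    """Run one Claude request in a worker thread; return its text or an error string.

    Hypothetical helper name; app.py does this inline inside unified_handler().
    """
    result_holder = {}

    def anthropic_thread():
        try:
            message = anthropic_client.messages.create(
                model=CLAUDE3_SONNET_MODEL,
                max_tokens=CLAUDE3_MAX_OUTPUT_TOKENS,
                system="You are a helpful assistant for proposal writing.",
                messages=[{"role": "user", "content": full_prompt}],
            )
            # message.content is a list of content blocks; plain requests return a
            # text block whose string lives on .text.
            result_holder["result"] = message.content[0].text if message.content else str(message)
        except Exception as e:
            logging.error(f"Error with Claude 3 API: {e}")
            result_holder["result"] = f"Error generating document: {e}"

    # daemon=True is added here (not in app.py) so this example script can exit
    # even if the request is still in flight after the timeout.
    t = threading.Thread(target=anthropic_thread, daemon=True)
    t.start()
    t.join(timeout=timeout)
    return result_holder.get("result", "Error: No document generated.")


if __name__ == "__main__":
    print(generate_document("Draft a one-paragraph executive summary for a cloud migration proposal."))

One design note on the guard: t.join(timeout=120) only stops waiting; if the API call is still in flight, the worker thread keeps running in the background. That is why the handler falls back to the "Error: No document generated." placeholder rather than cancelling the request.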
|