levalencia committed on
Commit
d98c8c3
·
1 Parent(s): eb206d0

fix errors

Browse files
Files changed (2) hide show
  1. .vscode/launch.json +21 -0
  2. app.py +14 -13
.vscode/launch.json ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "version": "0.2.0",
3
+ "configurations": [
4
+ {
5
+ "name": "Python: Streamlit",
6
+ "type": "python",
7
+ "request": "launch",
8
+ "module": "streamlit",
9
+ "args": [
10
+ "run",
11
+ "${workspaceFolder}/app.py"
12
+ ],
13
+ "console": "integratedTerminal",
14
+ "justMyCode": false,
15
+ "env": {
16
+ "PYTHONPATH": "${workspaceFolder}",
17
+ "TOGETHER_API_KEY": "${env:TOGETHER_API_KEY}"
18
+ }
19
+ }
20
+ ]
21
+ }
app.py CHANGED
@@ -60,7 +60,7 @@ def generate_response(
60
  client = get_client()
61
 
62
  has_images = False
63
- content_blocks = []
64
  image_content = None # To store image data
65
  image_mime_type = None # To store MIME type
66
 
@@ -72,10 +72,8 @@ def generate_response(
72
  image_content = content # Already base64 encoded
73
  image_mime_type = file.type # Store MIME type
74
  else:
75
- content_blocks.append({
76
- "type": "text",
77
- "text": f"File content:\n{content}"
78
- })
79
 
80
  # Build messages
81
  messages = [{"role": "system", "content": system_message}]
@@ -109,11 +107,15 @@ def generate_response(
109
 
110
  else:
111
  # Text-only model request
112
- current_message = {
113
- "role": "user",
114
- "content": [{"type": "text", "text": message}] + content_blocks
115
- }
116
- messages.append(current_message)
 
 
 
 
117
 
118
  stream = client.chat.completions.create(
119
  model="deepseek-ai/DeepSeek-R1",
@@ -121,13 +123,12 @@ def generate_response(
121
  max_tokens=max_tokens,
122
  temperature=temperature,
123
  top_p=top_p,
124
- stream=True
125
  )
126
 
127
  # Stream response
128
  for chunk in stream:
129
- if chunk.choices and chunk.choices[0].delta.content:
130
- yield chunk.choices[0].delta.content
131
 
132
  except Exception as e:
133
  yield f"Error: {str(e)}"
 
60
  client = get_client()
61
 
62
  has_images = False
63
+ formatted_content = message
64
  image_content = None # To store image data
65
  image_mime_type = None # To store MIME type
66
 
 
72
  image_content = content # Already base64 encoded
73
  image_mime_type = file.type # Store MIME type
74
  else:
75
+ formatted_content += f"\n\nContext:\n{file.name}\nContents:\n{content}"
76
+
 
 
77
 
78
  # Build messages
79
  messages = [{"role": "system", "content": system_message}]
 
107
 
108
  else:
109
  # Text-only model request
110
+ messages = [{"role": "system", "content": system_message}]
111
+
112
+ # Add history
113
+ for user_msg, assistant_msg in history:
114
+ messages.append({"role": "user", "content": user_msg})
115
+ messages.append({"role": "assistant", "content": assistant_msg})
116
+
117
+ # Add current message
118
+ messages.append({"role": "user", "content": formatted_content})
119
 
120
  stream = client.chat.completions.create(
121
  model="deepseek-ai/DeepSeek-R1",
 
123
  max_tokens=max_tokens,
124
  temperature=temperature,
125
  top_p=top_p,
126
+ stream=True,
127
  )
128
 
129
  # Stream response
130
  for chunk in stream:
131
+ yield chunk.choices[0].delta.content
 
132
 
133
  except Exception as e:
134
  yield f"Error: {str(e)}"