jisaacso219 committed
Commit e8c9855 · verified · 1 Parent(s): 075f9e9

Update app.py

Files changed (1): app.py  +181  -48
app.py CHANGED
@@ -1,60 +1,193 @@
-    def chat_stream(self, message, history):
-        messages = [{"role": "system", "content": self.system_prompt()}]
-        for msg in history:
-            if isinstance(msg, dict) and msg.get("role") in ["user", "assistant"]:
-                messages.append(msg)
-
-        messages.append({"role": "user", "content": message})
-        self.session_log.append({"role": "user", "content": message})
-
-        # First non-streamed call to check for tool calls
-        response = self.openai.chat.completions.create(
-            model="gpt-4o",
-            messages=messages,
-            tools=tools,
-            stream=False  # Check for tool calls
-        )
-
-        reply = response.choices[0].message
 
-        if reply.tool_calls:
-            tool_results = self.handle_tool_call(reply.tool_calls)
-            messages.append(reply)
-            messages.extend(tool_results)
 
-            # Retry final response after tool call
-            final_response = self.openai.chat.completions.create(
-                model="gpt-4o",
-                messages=messages,
-                tools=tools,
-                stream=True
             )
 
-            full_response = ""
-            for chunk in final_response:
-                delta = chunk.choices[0].delta
-                if hasattr(delta, "content") and delta.content:
-                    full_response += delta.content
-                    yield full_response
 
-        else:
-            # Normal streaming response
-            stream = self.openai.chat.completions.create(
                 model="gpt-4o",
                 messages=messages,
                 tools=tools,
-                stream=True
             )
 
-            full_response = ""
-            for chunk in stream:
-                delta = chunk.choices[0].delta
-                if hasattr(delta, "content") and delta.content:
-                    full_response += delta.content
-                    yield full_response
 
-        # Always add follow-up message
-        full_response += "\n\n💬 Let me know if you’d like to follow up or need help connecting with Jacob."
-        self.session_log.append({"role": "assistant", "content": full_response})
-        self.save_session_log()
 
+from dotenv import load_dotenv
+from openai import OpenAI
+import json
+import os
+import requests
+from PyPDF2 import PdfReader
+import gradio as gr
+import gdown
+from datetime import datetime
+from pathlib import Path
+import zipfile
+
+load_dotenv(override=True)
+
+def push(text):
+    try:
+        requests.post(
+            "https://api.pushover.net/1/messages.json",
+            data={
+                "token": os.getenv("PUSHOVER_TOKEN"),
+                "user": os.getenv("PUSHOVER_USER"),
+                "message": text,
+            }
         )
+    except Exception as e:
+        print(f"Pushover error: {e}")
+
+def record_user_details(email, name="Name not provided", notes="not provided"):
+    push(f"Recording {name} with email {email} and notes {notes}")
+    return {"recorded": "ok"}
+
+def record_unknown_question(question):
+    push(f"Recording {question}")
+    return {"recorded": "ok"}
+
+record_user_details_json = {
+    "name": "record_user_details",
+    "description": "Use this tool to record that a user is interested in being in touch and provided an email address",
+    "parameters": {
+        "type": "object",
+        "properties": {
+            "email": {"type": "string"},
+            "name": {"type": "string"},
+            "notes": {"type": "string"}
+        },
+        "required": ["email"],
+        "additionalProperties": False
+    }
+}
+
+record_unknown_question_json = {
+    "name": "record_unknown_question",
+    "description": "Record a question that couldn't be answered",
+    "parameters": {
+        "type": "object",
+        "properties": {
+            "question": {"type": "string"}
+        },
+        "required": ["question"],
+        "additionalProperties": False
+    }
+}
+
+tools = [
+    {"type": "function", "function": record_user_details_json},
+    {"type": "function", "function": record_unknown_question_json}
+]
+
+class Me:
+    def __init__(self):
+        self.openai = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
+        self.name = "Jacob Isaacson"
+        self.session_log = []
+        Path("chat_logs").mkdir(exist_ok=True)
+
+        gdown.download("https://drive.google.com/uc?id=1xz2RowkImpI8odYv8zvKdlRHaKfILn40", "linkedin.pdf", quiet=False)
+        reader = PdfReader("linkedin.pdf")
+        self.linkedin = "".join(page.extract_text() or "" for page in reader.pages)
+
+        gdown.download("https://drive.google.com/uc?id=1hjJz082YFSVjFtpO0pwT6Tyy3eLYYj6-", "summary.txt", quiet=False)
+        with open("summary.txt", "r", encoding="utf-8") as f:
+            self.summary = f.read()
+
+        self.archive_logs()
+
+    def system_prompt(self):
+        return f"""You are acting as {self.name}. You're answering questions on {self.name}'s website about his career, experience, and skills.
+Be professional and conversational, as if talking to a potential employer or client.
+
+If you can't answer something, call `record_unknown_question`. If a user seems interested, ask for their email and use `record_user_details`.
 
+## Summary:
+{self.summary}
 
+## LinkedIn Profile:
+{self.linkedin}
+"""
+
+    def handle_tool_call(self, tool_calls):
+        results = []
+        for tool_call in tool_calls:
+            tool_name = tool_call.function.name
+            arguments = json.loads(tool_call.function.arguments)
+            tool = globals().get(tool_name)
+            result = tool(**arguments) if tool else {}
+            results.append({"role": "tool", "tool_call_id": tool_call.id, "content": json.dumps(result)})
+        return results
+
+    def chat_stream(self, message, history):
+        messages = [{"role": "system", "content": self.system_prompt()}]
+
+        for msg in history:
+            if isinstance(msg, dict) and msg.get("role") in ["user", "assistant"]:
+                messages.append(msg)
+
+        messages.append({"role": "user", "content": message})
+        self.session_log.append({"role": "user", "content": message})
+
+        # First check for tool calls
+        response = self.openai.chat.completions.create(
             model="gpt-4o",
             messages=messages,
             tools=tools,
+            stream=False
         )
 
+        reply = response.choices[0].message
+
+        if reply.tool_calls:
+            tool_results = self.handle_tool_call(reply.tool_calls)
+            messages.append(reply)
+            messages.extend(tool_results)
+
+            final_response = self.openai.chat.completions.create(
+                model="gpt-4o",
+                messages=messages,
+                tools=tools,
+                stream=True
+            )
+
+            full_response = ""
+            for chunk in final_response:
+                delta = chunk.choices[0].delta
+                if hasattr(delta, "content") and delta.content:
+                    full_response += delta.content
+                    yield full_response
+        else:
+            stream = self.openai.chat.completions.create(
+                model="gpt-4o",
+                messages=messages,
+                tools=tools,
+                stream=True
+            )
 
+            full_response = ""
+            for chunk in stream:
+                delta = chunk.choices[0].delta
+                if hasattr(delta, "content") and delta.content:
+                    full_response += delta.content
+                    yield full_response
+
+        full_response += "\n\n💬 Let me know if you’d like to follow up or need help connecting with Jacob."
+        self.session_log.append({"role": "assistant", "content": full_response})
+        self.save_session_log()
+
+    def save_session_log(self):
+        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        filename = f"chat_logs/session_{timestamp}.json"
+        with open(filename, "w", encoding="utf-8") as f:
+            json.dump(self.session_log, f, indent=2)
+
+    def archive_logs(self):
+        zip_path = "chat_logs/weekly_archive.zip"
+        with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as archive:
+            for log_file in Path("chat_logs").glob("session_*.json"):
+                archive.write(log_file, arcname=log_file.name)
+
+me = Me()
+
+with gr.Blocks(title="Jacob Isaacson Chatbot") as iface:
+    with gr.Row():
+        gr.Image("jacob.png", width=100, show_label=False)
+        gr.Markdown("### Chat with Jacob Isaacson\nAsk about Jacob's background, skills, or career. \n🛡️ *All chats are logged for improvement purposes.*")
+
+    gr.ChatInterface(
+        fn=me.chat_stream,
+        chatbot=gr.Chatbot(show_copy_button=True),
+        examples=["What is Jacob's experience with AI?", "Tell me about his recent projects."],
+        type="messages"
+    )
+
+if __name__ == "__main__":
+    iface.launch()
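A quick way to smoke-test the new streaming path outside Gradio (a minimal sketch, not part of the commit; it assumes OPENAI_API_KEY and, optionally, the Pushover variables are set, and note that importing app runs Me.__init__, which downloads linkedin.pdf and summary.txt via gdown):

    # hypothetical local test, not included in this commit
    from app import me

    history = []   # "messages"-style history: a list of {"role": ..., "content": ...} dicts
    partial = ""
    for partial in me.chat_stream("What is Jacob's experience with AI?", history):
        pass       # each yield is the full reply accumulated so far
    print(partial) # last value yielded by the generator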