JustusI committed · verified
Commit 8c7c78b · 1 Parent(s): b265e97

Create app.py

Files changed (1):
  1. app.py +58 -0
app.py ADDED
@@ -0,0 +1,58 @@
+ import torch
+ import streamlit as st
+ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
+
+ torch.random.manual_seed(0)
+
+ # Load Phi-3-mini on CPU
+ model = AutoModelForCausalLM.from_pretrained(
+     "microsoft/Phi-3-mini-128k-instruct",
+     device_map="cpu",
+     torch_dtype="auto",
+     trust_remote_code=True,
+ )
+ tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-128k-instruct")
+
+ pipe = pipeline(
+     "text-generation",
+     model=model,
+     tokenizer=tokenizer,
+ )
+
+ generation_args = {
+     "max_new_tokens": 500,
+     "return_full_text": False,
+     "temperature": 0.0,
+     "do_sample": False,
+ }
+
+ st.title("💬 Chatbot")
+ st.caption("🚀 A Streamlit chatbot powered by Microsoft Phi-3-mini")
+
+ # Initialize chat history
+ if "messages" not in st.session_state:
+     st.session_state["messages"] = []  # e.g. [{"role": "assistant", "content": "How can I help you?"}]
+
+ # Display chat messages from history on app rerun
+ for message in st.session_state.messages:
+     st.chat_message(message["role"]).write(message["content"])
+
+ # React to user input
+ if prompt := st.chat_input():
+     # Display user message in chat message container
+     st.chat_message("user").write(prompt)
+     # Add user message to chat history
+     st.session_state.messages.append({"role": "user", "content": prompt})
+
+     # Generate a response from the full chat history
+     output = pipe(st.session_state.messages, **generation_args)
+     msg = output[0]["generated_text"]
+
+     # Display assistant response in chat message container
+     st.chat_message("assistant").write(msg)
+     # Add assistant response to chat history
+     st.session_state.messages.append({"role": "assistant", "content": msg})
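A minimal sketch of how to try the app locally, assuming the dependencies (streamlit, torch, transformers, and accelerate, which from_pretrained needs when device_map is set) are not yet installed:

    pip install streamlit torch transformers accelerate
    streamlit run app.py

Streamlit prints a local URL to open in the browser; the chat history is kept in st.session_state and replayed on each rerun.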