atwine committed on
Commit
1dab9af
·
1 Parent(s): 966ebb7

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +48 -0
app.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import time
from functools import lru_cache

import gradio as gr
from ctransformers import AutoModelForCausalLM
5
# System instructions injected ahead of every user turn.  Kept as a separate
# constant so the surrounding Llama-2 control tokens stay easy to audit.
_SYSTEM_MESSAGE = """You are a dedicated public health assistant, trained to support community health workers (CHWs) in their essential role of enhancing community health. Uphold these principles in your interactions:
- Be kind, helpful, respectful, honest, and professional. Think step by step before answering each question. Think about whether this is the right answer, would others agree with it? Improve your answer as needed.
- Always provide answers that are clear, concise, and focused on key concepts. Highlight main points and avoid unnecessary repetition.
- Base your responses on the latest training data available up to September 2021.
- Engage with a positive and supportive demeanor, understanding the importance of professionalism.
- Assist CHWs in understanding disease definitions, surveillance goals, and strategies. Provide clear signs for diagnosis and recommendations for public health conditions.
- Your primary aim is to help CHWs identify significant public health diseases promptly, ensuring quick interventions.
- If unsure about a question, acknowledge the limitation and avoid sharing incorrect information.
"""

# Llama-2 chat format: the system message is wrapped in <<SYS>> markers inside
# an [INST] block, with <s>/</s> sequence delimiters around the whole thing.
PROMPT_TEMPLATE = "<s>[INST]<<SYS>>" + _SYSTEM_MESSAGE + "<</SYS>>[/INST]</s>"
19
+
20
@lru_cache(maxsize=1)
def load_llm():
    """Load and return the quantized Llama-2-7B chat model.

    Cached with ``lru_cache(maxsize=1)`` so the (large) GGUF weights are read
    from the Hub / disk only once per process; previously every chat turn
    re-loaded the entire model, making each response pay the full startup cost.

    Returns:
        The ``ctransformers`` causal-LM object; calling it with a prompt
        string generates text.
    """
    llm = AutoModelForCausalLM.from_pretrained(
        "atwine/Llama-2-7b-chat-q8-gguf",
        model_type="llama",
        # Generation defaults: long answers, mild anti-repetition, near-greedy
        # sampling for factual public-health guidance.
        max_new_tokens=1096,
        repetition_penalty=1.13,
        temperature=0.1,
    )
    return llm
28
+
29
def llm_function(message, chat_history):
    """Produce the assistant's reply for one chat turn.

    Args:
        message: The user's latest message text.
        chat_history: Prior turns supplied by ``gr.ChatInterface``; accepted
            for interface compatibility but not used in the prompt.

    Returns:
        The raw text generated by the model.
    """
    model = load_llm()
    # Prepend the fixed system prompt, then wrap the user turn in its own
    # Llama-2 instruction block.
    prompt = f"{PROMPT_TEMPLATE}<s>[INST]{message}[/INST]</s>"
    return model(prompt)
37
+
38
title = "Llama 7B GGUF Demo"

# Starter prompts shown under the chat box.
examples = [
    'What is yellow fever.',
]

# Build the chat UI around llm_function and start the web server.
demo = gr.ChatInterface(
    fn=llm_function,
    title=title,
    examples=examples,
)
demo.launch()