File size: 2,545 Bytes
69a8774
 
9449955
83c6483
e318921
9449955
 
 
 
69a8774
 
 
 
 
 
 
 
 
 
 
 
 
 
9449955
69a8774
9449955
69a8774
 
 
 
 
9449955
69a8774
 
 
 
 
 
 
 
 
 
 
 
 
 
9449955
69a8774
 
 
9449955
69a8774
 
 
 
9449955
69a8774
 
9449955
69a8774
 
 
 
 
 
 
 
 
9449955
69a8774
 
9449955
69a8774
 
9449955
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
import os

from fastapi import FastAPI, WebSocket, WebSocketDisconnect
from fastapi.responses import HTMLResponse
from llama_cpp import Llama

app = FastAPI()

# Load the quantized Llama-2 7B chat model once at import time (blocking).
# n_ctx=2048: context window; n_batch=512: prompt-eval batch size;
# use_mlock=True: pin model pages in RAM to avoid swapping;
# n_threads=8: CPU threads for inference — tune to the host machine.
# NOTE(review): model path is relative to the working directory — confirm
# the .gguf file is shipped next to this script.
llm = Llama(model_path="llama-2-7b-chat.Q3_K_S.gguf", n_ctx=2048, n_batch=512, use_mlock=True, n_threads=8)

# Inline single-page chat client. It opens a websocket to
# ws://localhost:7860/ws/chat, sends the input box contents on Enter, and
# appends every incoming text frame as its own <li>.
# NOTE(review): the server streams one frame per generated token, so each
# token renders as a separate list item — confirm this is the intended UI.
# NOTE(review): the ws:// host/port are hard-coded; they must match the
# uvicorn.run(...) settings at the bottom of this file.
html = """
<!DOCTYPE html>
<html>
    <head>
        <title>Chatbot by Aritra Roy & DVLH</title>
    </head>
    <body>
        <h1>Chatbot by Aritra Roy & DVLH</h1>
        <div>
            <ul id="chat"></ul>
            <input type="text" id="user_input" onkeydown="sendMessage(event)">
        </div>
        <script>
            var chat = document.getElementById('chat');

            var socket = new WebSocket('ws://localhost:7860/ws/chat');

            socket.onmessage = function(event) {
                var li = document.createElement('li');
                li.appendChild(document.createTextNode(event.data));
                chat.appendChild(li);
            };

            function sendMessage(event) {
                if (event.key === 'Enter') {
                    var user_input = document.getElementById('user_input').value;
                    var li = document.createElement('li');
                    li.appendChild(document.createTextNode('> ' + user_input));
                    chat.appendChild(li);
                    socket.send(user_input);
                    document.getElementById('user_input').value = '';
                }
            }
        </script>
    </body>
</html>
"""

@app.get("/")
async def index() -> HTMLResponse:
    """Serve the single-page chat UI.

    Fix: `html` and the imported `HTMLResponse` were defined but never
    used — without this route the chat page was unreachable.
    """
    return HTMLResponse(html)


@app.websocket("/ws/chat")
async def chat(websocket: WebSocket) -> None:
    """Stream Llama-2 chat completions to a websocket client.

    For each text message received, wraps it in the Llama-2 chat prompt
    template and streams generated tokens back one frame per token.
    Loops until the client disconnects.

    Fixes over the original:
    - Exit cleanly on WebSocketDisconnect instead of letting the broad
      exception handler try to send on a closed socket.
    - Removed the per-message ``os.system("cls")`` — it is Windows-only,
      clears the *server* console (not the client), and errors on POSIX.
      The dead ``if ask == 'clear'`` server-console clear went with it.
    - Iterate the token stream with ``for`` instead of a manual
      ``next()``/``StopIteration`` loop.
    """
    await websocket.accept()

    while True:
        try:
            user_input = await websocket.receive_text()
        except WebSocketDisconnect:
            break  # client closed the socket — stop serving this connection

        try:
            prompt = f"Llama-2-Chat [INST] <<SYS>>You're an assistant named Tusti. You are Developed by Aritra Roy. Don't share any false information.<</SYS>> {user_input} [/INST]"

            # stream=True yields chunks as tokens are generated; low
            # temperature/top_p keep the output focused.
            for chunk in llm(prompt, max_tokens=1024, echo=False, temperature=0.2, top_p=0.1, stream=True):
                choices = chunk.get('choices')
                if choices and choices[0].get('text'):
                    await websocket.send_text(choices[0]['text'])

        except Exception as e:
            # Best-effort: report the failure to the client and keep the
            # connection alive for the next message.
            await websocket.send_text(str(e))

def _serve() -> None:
    """Run the app under uvicorn on 0.0.0.0:7860 (the port the page's ws:// URL expects)."""
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)


if __name__ == "__main__":
    _serve()