File size: 1,965 Bytes
4c2a30d
 
 
 
 
 
 
 
 
 
 
 
 
c4892d1
4c2a30d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c4892d1
4c2a30d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c4892d1
4c2a30d
 
c4892d1
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import gradio as gr
import edge_tts
import asyncio
import tempfile
import os
from huggingface_hub import InferenceClient
import re
from streaming_stt_nemo import Model
import torch
import random
import pandas as pd
from datetime import datetime

# ... (previous code remains the same)

# Module-level store for the conversation history.
# Columns: Timestamp (string, "%Y-%m-%d %H:%M:%S"), Request (user text),
# Response (model output). Appended to by models(); read by
# display_history() and download_history().
history_df = pd.DataFrame(columns=['Timestamp', 'Request', 'Response'])

def models(text, model="Mixtral 8x7B", seed=42):
    """Generate a reply for *text* and record the exchange in the history.

    Args:
        text: The user's request string.
        model: Display name of the backing model (resolved via ``client_fn``).
        seed: RNG seed; normalized through ``randomize_seed_fn`` first.

    Returns:
        The generated response text — streamed tokens concatenated, with the
        end-of-sequence token excluded.

    Side effects:
        Appends a (Timestamp, Request, Response) row to the module-level
        ``history_df`` DataFrame.
    """
    global history_df

    seed = int(randomize_seed_fn(seed))

    client = client_fn(model)

    generate_kwargs = dict(
        max_new_tokens=300,
        seed=seed
    )
    formatted_prompt = system_instructions1 + text + "[JARVIS]"
    stream = client.text_generation(
        formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
    output = ""
    for response in stream:
        # Skip the end-of-sequence marker; every other token is reply text.
        if not response.token.text == "</s>":
            output += response.token.text

    # Record the interaction; the timestamp is stored as a string so CSV
    # export stays simple and round-trippable.
    new_row = pd.DataFrame({
        'Timestamp': [datetime.now().strftime("%Y-%m-%d %H:%M:%S")],
        'Request': [text],
        'Response': [output]
    })
    # Replace the empty frame on first use instead of concatenating into it:
    # pd.concat with an empty all-NA frame raises a FutureWarning in pandas 2.x.
    if history_df.empty:
        history_df = new_row
    else:
        history_df = pd.concat([history_df, new_row], ignore_index=True)

    return output

async def respond(audio, model, seed):
    """Transcribe the audio, generate a reply, and stream back a TTS wav path.

    Yields the path of a temporary .wav file containing the spoken reply.
    """
    transcript = transcribe(audio)
    answer = models(transcript, model, seed)
    speech = edge_tts.Communicate(answer)
    with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as out_file:
        wav_path = out_file.name
        await speech.save(wav_path)
    yield wav_path

def display_history():
    """Return the accumulated request/response history as a DataFrame."""
    current = history_df
    return current

def download_history():
    """Serialize the interaction history to CSV text (no index column)."""
    csv_text = history_df.to_csv(index=False)
    return csv_text

# ... (rest of the code remains the same)

# Entry point. `demo` (the Gradio app) is built elsewhere in this file;
# queue(max_size=200) bounds concurrent requests before launching.
if __name__ == "__main__":
    demo.queue(max_size=200).launch(share=True)  # Added share=True for public link