# madhuram/app.py
import os
import shutil
import sys

import gradio as gr
import torch
from huggingface_hub import hf_hub_download, login
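# Expected environment variables (typically configured as Space secrets):
# HF_TOKEN   - Hugging Face access token used for login and private downloads
# TOKENIZER  - tokenizer identifier (read here; presumably consumed by gen.py)
# REPO_ID    - model repository that also hosts gen.py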
login(token=os.environ["HF_TOKEN"])
tokenizer = os.environ["TOKENIZER"]
repo_id = os.environ["REPO_ID"]
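# Limit PyTorch to a single CPU thread, presumably to avoid oversubscribing the
# Space's CPU while the GPU, if present, does the heavy lifting.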
torch.set_num_threads(1)
if torch.cuda.is_available():
    # Let cuDNN benchmark and cache the fastest kernels for repeated shapes.
    torch.backends.cudnn.benchmark = True
# Download the model's chat/generation code (gen.py) from the model repo.
try:
    generate_file = hf_hub_download(
        repo_id=repo_id,
        filename="gen.py",
        token=os.environ["HF_TOKEN"],
    )
    # Copy the cached file into the working directory so it can be imported.
    shutil.copy(generate_file, "./gen.py")
except Exception as e:
    print(f"Error downloading files: {e}")
    # gen.py is required for the import below, so fail loudly here.
    raise
# Make the downloaded gen.py importable from the working directory.
sys.path.append('.')
from gen import chat_interface
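# NOTE (assumption): gen.py is fetched at runtime, so its contents are not part
# of this file. With type="messages", gr.ChatInterface invokes the handler as
# chat_interface(message, history), where history is a list of
# {"role": ..., "content": ...} dicts; gen.py is assumed to expose a callable
# with that signature returning (or streaming) the reply text.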
with gr.Blocks(title="Madhuram Model Chat") as demo:
    with gr.Column(elem_id="main-container"):
        gr.Markdown("# Madhuram Chat Interface", elem_classes="center-text")
        chatbot = gr.ChatInterface(
            fn=chat_interface,
            type="messages",
        )
        gr.Markdown(
            "*Disclaimer: this is a demo version of Madhuram. It may occasionally "
            "generate incorrect or incomplete responses, so please verify important "
            "information independently. The complete model will be available through "
            "our own playground, where the missing features will be incorporated.*",
            elem_classes="disclaimer",
        )
demo.css = """
#main-container {
    max-width: 800px;
    margin: 0 auto;
    padding: 20px;
}
.center-text {
    text-align: center;
}
.disclaimer {
    text-align: center;
    color: #666;
    font-size: 0.9em;
    margin-bottom: 20px;
}
"""
if __name__ == "__main__":
    demo.launch()