import os
import streamlit as st
from huggingface_hub import InferenceClient

# Read the Hugging Face token from Streamlit secrets and create the inference client
HF_TOKEN = st.secrets["HF_TOKEN"]
client = InferenceClient(token=HF_TOKEN)

st.title("🎖️ Major Plato – Simulator")

# Load the system prompt and the default scenario from local files, if present
system = open("system.txt", encoding="utf-8").read() if os.path.exists("system.txt") else ""
scenario = open("scenario.txt", encoding="utf-8").read() if os.path.exists("scenario.txt") else ""  # loaded but not used below

file_up = st.file_uploader("Upload the scenario file (.txt):", type="txt")
user_in = st.text_area("Or type your question:")

if st.button("Start"):
    if (not file_up and not user_in.strip()) or not system.strip():
        st.error("system.txt or the user input/file is missing!")
    else:
        # Build the user message from the uploaded file and/or the text area
        usr_content = ""
        if file_up:
            usr_content += file_up.read().decode("utf-8")
        if user_in.strip():
            usr_content += "\n\n" + user_in.strip()
        messages = [
            {"role": "system", "content": system},
            {"role": "user", "content": usr_content},
        ]
        with st.spinner("Major Plato is thinking..."):
            resp = client.chat_completion(
                model="meta-llama/Meta-Llama-3-8B-Instruct",
                messages=messages,
                max_tokens=200,
                temperature=0.7,
            )
        st.subheader("🗣️ Response:")
        st.write(resp.choices[0].message.content)
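
To sanity-check the model call outside the Streamlit UI, here is a minimal standalone sketch of the same chat_completion request. It assumes HF_TOKEN is exported as an environment variable, and the two prompt strings are placeholders rather than the app's real system.txt/scenario content.

# standalone_check.py – hypothetical helper script, not part of the Space itself
# Assumes HF_TOKEN is set in the environment; prompts below are placeholders.
import os
from huggingface_hub import InferenceClient

client = InferenceClient(token=os.environ["HF_TOKEN"])
resp = client.chat_completion(
    model="meta-llama/Meta-Llama-3-8B-Instruct",
    messages=[
        {"role": "system", "content": "You are Major Plato."},        # placeholder system prompt
        {"role": "user", "content": "Describe the scenario briefly."},  # placeholder user prompt
    ],
    max_tokens=200,
    temperature=0.7,
)
print(resp.choices[0].message.content)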