import os

import streamlit as st
from huggingface_hub import InferenceClient

# Read the Hugging Face API token from Streamlit secrets and build the inference client.
HF_TOKEN = st.secrets["HF_TOKEN"]
client = InferenceClient(token=HF_TOKEN)

st.title("🎖️ Major Plato – Simulator")


def read_if_exists(path: str) -> str:
    """Return the file's contents, or an empty string if it does not exist."""
    if os.path.exists(path):
        with open(path, encoding="utf-8") as f:
            return f.read()
    return ""


# System prompt and optional default scenario shipped alongside the app.
system = read_if_exists("system.txt")
scenario = read_if_exists("scenario.txt")

file_up = st.file_uploader("Upload the scenario file (.txt):", type="txt")
user_in = st.text_area("Or type your question:")

if st.button("Start"):
    if (not file_up and not user_in.strip()) or not system.strip():
        st.error("system.txt or the user input/file is missing!")
    else:
        # Combine the uploaded scenario and the typed question into a single user message.
        usr_content = ""
        if file_up:
            usr_content += file_up.read().decode("utf-8")
        if user_in.strip():
            usr_content += "\n\n" + user_in.strip()
        messages = [
            {"role": "system", "content": system},
            {"role": "user", "content": usr_content},
        ]
        with st.spinner("Major Plato is thinking..."):
            resp = client.chat_completion(
                model="meta-llama/Meta-Llama-3-8B-Instruct",
                messages=messages,
                max_tokens=200,
                temperature=0.7,
            )
        st.subheader("🗣️ Response:")
        st.write(resp.choices[0].message.content)