import streamlit as st
import openai
from annotated_text import annotated_text
import os
import achivenment_standards as data  # local module with the curated record data
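
# Illustrative assumption (the module is not shown here): achivenment_standards is
# expected to define a dict named creative_activities that maps each creative
# experiential activity area to a list of cumulative record strings, roughly:
#
#     creative_activities = {
#         "์ž์œจํ™œ๋™": ["ํ•™๊ธ‰ ํšŒ์˜๋ฅผ ์ฃผ๋„์ ์œผ๋กœ ์ง„ํ–‰ํ•จ", "..."],
#         "๋™์•„๋ฆฌํ™œ๋™": ["๊ณผํ•™ ๋™์•„๋ฆฌ์—์„œ ์‹คํ—˜์„ ์„ค๊ณ„ํ•จ", "..."],
#     }
#
# The keys and record strings above are made-up examples; the selectbox and
# multiselect widgets below only rely on this "area -> list of records" shape.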


# OpenAI API setup (the key is read from an environment variable)
openai.api_key = os.getenv("OPENAI_API_KEY")

# Inference function: use GPT to turn the selected cumulative records into a remarks entry
def generate_annotated_text(text):
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo-16k",
        messages=[
            {
                "role": "system",
                "content": f"์‚ฌ์šฉ์ž์˜ ์ž…๋ ฅ {text}๋Š” ํ•™์ƒ์˜ ์ฐฝ์˜์  ์ฒดํ—˜ํ™œ๋™์— ๋Œ€ํ•œ ๋ˆ„๊ฐ€๊ธฐ๋ก์ž…๋‹ˆ๋‹ค. ์ด ๋ˆ„๊ฐ€๊ธฐ๋ก์„ ๋ฐ”ํƒ•์œผ๋กœ ํ•™์ƒ์˜ ๊ฐœ๋ณ„์  ํŠน์„ฑ๊ณผ ์„ฑ์ทจ๋ฅผ ๊ฐ•์กฐํ•˜๋Š” ํŠน๊ธฐ์‚ฌํ•ญ ๊ธฐ๋ก์„ ์ƒ์„ฑํ•˜์‹ญ์‹œ์˜ค. ์‚ฌ์šฉ์ž ์ž…๋ ฅ์„ ํŠน๊ธฐ์‚ฌํ•ญ์œผ๋กœ ๋ณ€ํ™˜ํ•  ๋•Œ, ๋ฌธ์žฅ์˜ ๋์€ '~์ž„', '~ํ•จ', '~์Œ'๊ณผ ๊ฐ™์ด ์ข…๊ฒฐํ˜• ์–ด๋ฏธ๋กœ ๋งˆ๋ฌด๋ฆฌํ•ฉ๋‹ˆ๋‹ค. ๋‹ค์Œ ์ž…๋ ฅ๊ณผ ์ถœ๋ ฅ ์˜ˆ์ œ๋ฅผ ์ฐธ๊ณ ํ•˜์„ธ์š”. ์˜ˆ์ œ: ์ž…๋ ฅ: ๋ˆ„๊ฐ€๊ธฐ๋ก1: '๊ณผํ•™ ์‹คํ—˜ ๋Œ€ํšŒ์— ์ฐธ์—ฌํ•จ', ๋ˆ„๊ฐ€๊ธฐ๋ก2: '์‹คํ—˜์—์„œ ๋…์ฐฝ์ ์ธ ์ ‘๊ทผ ๋ฐฉ์‹์„ ์‚ฌ์šฉํ•จ' ์ถœ๋ ฅ: '๊ณผํ•™ ์‹คํ—˜ ๋Œ€ํšŒ์— ์ ๊ทน์ ์œผ๋กœ ์ฐธ์—ฌํ•จ. ์‹คํ—˜์—์„œ ๋…์ฐฝ์ ์ธ ์ ‘๊ทผ ๋ฐฉ์‹์„ ์‚ฌ์šฉํ•˜๋ฉฐ ๋ฌธ์ œ ํ•ด๊ฒฐ ๋Šฅ๋ ฅ์„ ๋ณด์ž„. ์‹คํ—˜ ๊ณผ์ •์—์„œ์˜ ์ฐฝ์˜์„ฑ๊ณผ ์ฃผ๋„์ ์ธ ํƒœ๋„๊ฐ€ ์ธ์ƒ์ ์ž„. ๊ณผํ•™์— ๋Œ€ํ•œ ๊นŠ์€ ์ดํ•ด์™€ ํ˜ธ๊ธฐ์‹ฌ์„ ๋ฐ”ํƒ•์œผ๋กœ ์‹คํ—˜์— ๊ธฐ์—ฌํ•จ.'"
            },
            {
                "role": "user",
                "content": text
            }
        ],
        temperature=1,
        max_tokens=10000,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0
    )
    return response['choices'][0]['message']['content']

# ์œ ์‚ฌํ•œ ๋ฌธ์žฅ ์ƒ์„ฑ ํ•จ์ˆ˜
def generate_similar_sentences(base_sentence):
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo-16k",
        messages=[
            {
                "role": "system",
                "content": f"๋‹ค์Œ์€ '{base_sentence}'์™€ ์œ ์‚ฌํ•œ ํ•™์ƒ์˜ ํŠน์„ฑ๊ณผ ์„ฑ์ทจ๋ฅผ ๊ฐ•์กฐํ•˜๋Š” ์˜ˆ์‹œ ๋ฌธํ•ญ 10๊ฐœ๋ฅผ ๋งŒ๋“ค์–ด๋ผ. ๋ฌธ์žฅ์˜ ๋์€ '~์ž„,~ํ•จ,~์Œ'์œผ๋กœ ๋๋‚˜๋„๋ก ํ•ด์ค˜"
            },
            {
                "role": "user",
                "content": base_sentence
            }
        ],
        temperature=0.7,
        max_tokens=10000,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0
    )
    generated_sentences = response['choices'][0]['message']['content'].split('\n')
    return [sentence.strip() for sentence in generated_sentences if sentence.strip()]
    
    
# Streamlit ์•ฑ์˜ ์ œ๋ชฉ ๋ฐ ์„ค๋ช…
st.title("๋ˆ„๊ฐ€๊ธฐ๋ก ๊ธฐ๋ฐ˜ ์ฐฝ์ฒด ํŠน๊ธฐ์‚ฌํ•ญ ์ƒ์„ฑ")
st.write("์ฐฝ์ฒด ๋ˆ„๊ฐ€๊ธฐ๋ก์„ ์ž…๋ ฅํ•˜์‹œ๋ฉด, ํ•ด๋‹น ๋ˆ„๊ฐ€๊ธฐ๋ก์— ๊ธฐ๋ฐ˜ํ•œ ์ฐฝ์ฒด ํŠน๊ธฐ์‚ฌํ•ญ ๋ฌธ๊ตฌ๋ฅผ ์ œ์•ˆํ•ฉ๋‹ˆ๋‹ค.")

# ์ฐฝ์˜์  ์ฒดํ—˜ํ™œ๋™ ๋ฐ์ดํ„ฐ ๊ฐ€์ ธ์˜ค๊ธฐ
creative_activities = data.creative_activities

# ์ฐฝ์˜์  ์ฒดํ—˜ํ™œ๋™ ์˜์—ญ ๋“œ๋กญ๋‹ค์šด
activity_area = st.selectbox("์ฐฝ์˜์  ์ฒดํ—˜ํ™œ๋™ ์˜์—ญ์„ ์„ ํƒํ•˜์„ธ์š”:", list(creative_activities.keys()))

# ์„ ํƒ๋œ ์˜์—ญ์— ๋”ฐ๋ฅธ ๋ˆ„๊ฐ€๊ธฐ๋ก ๋ชฉ๋ก
selected_records = creative_activities[activity_area]
selected_record = st.multiselect("๋ˆ„๊ฐ€๊ธฐ๋ก์„ ์„ ํƒํ•˜์„ธ์š”:", selected_records, default=[])

# ์„ ํƒ๋œ ๋ˆ„๊ฐ€๊ธฐ๋ก ์—ฐ๊ฒฐ
concatenated_record = " ".join(selected_record)
st.text_area("์„ ํƒ๋œ ๋ˆ„๊ฐ€๊ธฐ๋ก:", concatenated_record, height=100)

# ์„ธ์…˜ ์ƒํƒœ ์ดˆ๊ธฐํ™”
if 'generated_result' not in st.session_state:
    st.session_state.generated_result = None
if 'selected_sentence' not in st.session_state:
    st.session_state.selected_sentence = None
if 'similar_sentences' not in st.session_state:
    st.session_state.similar_sentences = []

# "ํ‰๊ฐ€ ์ƒ์„ฑ" ๋ฒ„ํŠผ ํด๋ฆญ ์‹œ์˜ ๋™์ž‘
if st.button("๋ฌธ๊ตฌ ์ƒ์„ฑ"):
    with st.spinner('๋‹ต๋ณ€ ์ƒ์„ฑ์ค‘...'):
        result = generate_annotated_text(achievement_standard) 
        st.session_state.generated_result = result
        st.session_state.selected_sentence = None  # ์ƒˆ๋กœ์šด ํ‰๊ฐ€ ์ƒ์„ฑ์‹œ ์„ ํƒ๋œ ๋ฌธ์žฅ ์ดˆ๊ธฐํ™”
        st.session_state.similar_sentences = []  # ์ด์ „ ์œ ์‚ฌํ•œ ๋ฌธ์žฅ๋“ค ์ดˆ๊ธฐํ™”
    exec(result.replace('```', ''))
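
# For illustration only: the exec() above and the sentence extraction below assume
# generated_result contains executable annotated_text(...) code, e.g. a hypothetical
# model output such as:
#
#     annotated_text(
#         ("๊ณผํ•™ ์‹คํ—˜ ๋Œ€ํšŒ์— ์ ๊ทน์ ์œผ๋กœ ์ฐธ์—ฌํ•จ.", "์ฐธ์—ฌ๋„"),
#         ("์‹คํ—˜์—์„œ ๋…์ฐฝ์ ์ธ ์ ‘๊ทผ ๋ฐฉ์‹์„ ์‚ฌ์šฉํ•จ.", "์ฐฝ์˜์„ฑ"),
#     )
#
# The annotation labels ("์ฐธ์—ฌ๋„", "์ฐฝ์˜์„ฑ") are made up for this sketch.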

# Select box plus a button for generating similar sentences
if st.session_state.generated_result:
    # Extract only the sentences from the generated annotated_text(...) code:
    # for every line containing '("', take the text between '("' and '",'.
    result_lines = st.session_state.generated_result.split('\n')
    sentences = [line[start_idx + 2:line.find('",', start_idx)].strip() for line in result_lines if (start_idx := line.find('("')) != -1]
    
    if sentences:
        selected_sentence = st.selectbox("๋ฌธ์žฅ์„ ์„ ํƒํ•˜์„ธ์š”:", sentences, key="sentence_select")
        st.session_state.selected_sentence = selected_sentence

    # ์œ ์‚ฌํ•œ ๋ฌธ์žฅ ์ƒ์„ฑ ๋ฒ„ํŠผ
    if st.button("์œ ์‚ฌํ•œ ๋ฌธ๊ตฌ ์ƒ์„ฑ") and st.session_state.selected_sentence:
        with st.spinner('๋ฌธ์žฅ ์ƒ์„ฑ์ค‘...'):
            st.session_state.similar_sentences = generate_similar_sentences(st.session_state.selected_sentence)

    # Display the generated similar sentences
    for sentence in st.session_state.similar_sentences:
        st.write(sentence)
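
# A minimal sketch of how this app might be run, assuming the file is saved as
# app.py, the st-annotated-text package and an openai version that still provides
# openai.ChatCompletion (pre-1.0) are installed, and the API key is exported:
#
#     export OPENAI_API_KEY="sk-..."
#     streamlit run app.py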