utkarsh1797 committed
Commit 224856c · verified
1 Parent(s): bf0f6a6

Update streamlit_app.py

Files changed (1)
  1. streamlit_app.py +53 -47
streamlit_app.py CHANGED
@@ -1,49 +1,55 @@
- import os
- os.environ["STREAMLIT_BROWSER_GATHER_USAGE_STATS"] = "false"

  import streamlit as st
- import spacy
- import benepar
- from nltk import Tree
- import nltk
-
- # Setup NLTK and benepar
- nltk.download('punkt')
- benepar.download('benepar_en3')
-
- nlp = spacy.load("en_core_web_sm")
- if "benepar" not in nlp.pipe_names:
-     nlp.add_pipe("benepar", config={"model": "benepar_en3"})
-
- st.set_page_config(page_title="Syntax Parser Comparison Tool", layout="wide")
- st.title("🌐 Syntax Parser Comparison Tool")
- st.write("This tool compares Dependency Parsing, Constituency Parsing, and a simulated Abstract Syntax Representation (ASR).")
-
- sentence = st.text_input("Enter a sentence:", "John eats an apple.")
-
- if sentence:
-     doc = nlp(sentence)
-     sent = list(doc.sents)[0]
-
-     col1, col2, col3 = st.columns(3)
-
-     with col1:
-         st.header("Dependency Parsing")
-         for token in sent:
-             st.write(f"{token.text} --> {token.dep_} --> {token.head.text}")
-         st.code(" ".join(f"({token.text}, {token.dep_}, {token.head.text})" for token in sent))
-
-     with col2:
-         st.header("Constituency Parsing")
-         tree = sent._.parse_string
-         st.text(tree)
-         st.code(Tree.fromstring(tree).pformat())
-
-     with col3:
-         st.header("Simulated ASR Output")
-         st.write("Combining phrase structure with dependency head annotations:")
-         for token in sent:
-             if token.dep_ in ("nsubj", "obj", "det", "ROOT"):
-                 st.write(f"[{token.text}] - {token.dep_} --> {token.head.text} ({token.pos_})")
-         st.markdown("_(ASR is simulated by combining POS tags, dependency heads, and phrase information.)_")
-         st.code(" ".join(f"[{token.text}: {token.dep_} → {token.head.text}]({token.pos_})" for token in sent))
 
+ # import os
+ # os.environ["STREAMLIT_BROWSER_GATHER_USAGE_STATS"] = "false"
+
+ # import streamlit as st
+ # import spacy
+ # import benepar
+ # from nltk import Tree
+ # import nltk
+
+ # # Setup NLTK and benepar
+ # nltk.download('punkt')
+ # benepar.download('benepar_en3')
+
+ # nlp = spacy.load("en_core_web_sm")
+ # if "benepar" not in nlp.pipe_names:
+ #     nlp.add_pipe("benepar", config={"model": "benepar_en3"})
+
+ # st.set_page_config(page_title="Syntax Parser Comparison Tool", layout="wide")
+ # st.title("🌐 Syntax Parser Comparison Tool")
+ # st.write("This tool compares Dependency Parsing, Constituency Parsing, and a simulated Abstract Syntax Representation (ASR).")
+
+ # sentence = st.text_input("Enter a sentence:", "John eats an apple.")
+
+ # if sentence:
+ #     doc = nlp(sentence)
+ #     sent = list(doc.sents)[0]
+
+ #     col1, col2, col3 = st.columns(3)
+
+ #     with col1:
+ #         st.header("Dependency Parsing")
+ #         for token in sent:
+ #             st.write(f"{token.text} --> {token.dep_} --> {token.head.text}")
+ #         st.code(" ".join(f"({token.text}, {token.dep_}, {token.head.text})" for token in sent))
+
+ #     with col2:
+ #         st.header("Constituency Parsing")
+ #         tree = sent._.parse_string
+ #         st.text(tree)
+ #         st.code(Tree.fromstring(tree).pformat())
+
+ #     with col3:
+ #         st.header("Simulated ASR Output")
+ #         st.write("Combining phrase structure with dependency head annotations:")
+ #         for token in sent:
+ #             if token.dep_ in ("nsubj", "obj", "det", "ROOT"):
+ #                 st.write(f"[{token.text}] - {token.dep_} --> {token.head.text} ({token.pos_})")
+ #         st.markdown("_(ASR is simulated by combining POS tags, dependency heads, and phrase information.)_")
+ #         st.code(" ".join(f"[{token.text}: {token.dep_} → {token.head.text}]({token.pos_})" for token in sent))
+

  import streamlit as st
+
+ st.title("✅ Custom App Loaded")
+ st.write("If you can see this, your `streamlit_app.py` is correctly loaded by Hugging Face.")