import streamlit as st
import random
import numpy as np
import tensorflow as tf
from tensorflow.keras import layers, models
# ---------------------------- Helper Function for NER Data ----------------------------
def generate_ner_data():
    # Sample NER data for different entities
    data_person = [{"text": f"Person example {i}", "entities": [{"entity": "Person", "value": f"Person {i}"}]} for i in range(1, 21)]
    data_organization = [{"text": f"Organization example {i}", "entities": [{"entity": "Organization", "value": f"Organization {i}"}]} for i in range(1, 21)]
    data_location = [{"text": f"Location example {i}", "entities": [{"entity": "Location", "value": f"Location {i}"}]} for i in range(1, 21)]
    data_date = [{"text": f"Date example {i}", "entities": [{"entity": "Date", "value": f"Date {i}"}]} for i in range(1, 21)]
    data_product = [{"text": f"Product example {i}", "entities": [{"entity": "Product", "value": f"Product {i}"}]} for i in range(1, 21)]

    # Create a dictionary of all NER examples
    ner_data = {
        "Person": data_person,
        "Organization": data_organization,
        "Location": data_location,
        "Date": data_date,
        "Product": data_product,
    }
    return ner_data
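
# Each generated record mimics a minimal NER annotation, e.g.:
#   {"text": "Person example 1", "entities": [{"entity": "Person", "value": "Person 1"}]}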
# ---------------------------- Fun NER Data Function ----------------------------
def ner_demo():
    st.header("🤖 LLM NER Model Demo 🕵️‍♂️")

    # Generate NER data
    ner_data = generate_ner_data()

    # Pick a random entity type to display
    entity_type = random.choice(list(ner_data.keys()))
    st.subheader(f"Here comes the {entity_type} entity recognition, ready to show its magic! 🎩✨")

    # Select a random record to display
    example = random.choice(ner_data[entity_type])
    st.write(f"Analyzing: *{example['text']}*")

    # Display the recognized entities
    for entity in example["entities"]:
        st.success(f"Found a {entity['entity']}: **{entity['value']}**")

    # A bit of rhyme to lighten up the task
    st.write("There once was an AI so bright,")
    st.write("It could spot any name in sight,")
    st.write("With a click or a tap, it put on its cap, 🎩")
    st.write("And found entities day or night!")
# ---------------------------- Helper: Text Data Augmentation ----------------------------
def word_subtraction(text):
    """Remove one word at a random position (texts of two words or fewer are left unchanged)."""
    words = text.split()
    if len(words) > 2:
        index = random.randint(0, len(words) - 1)
        words.pop(index)
    return " ".join(words)

def word_recombination(text):
    """Recombine the words by shuffling them into a random order."""
    words = text.split()
    random.shuffle(words)
    return " ".join(words)
# ---------------------------- ML Model Building ----------------------------
def build_small_model(input_shape):
    model = models.Sequential([
        layers.Input(shape=(input_shape,)),
        layers.Dense(64, activation='relu'),
        layers.Dense(32, activation='relu'),
        layers.Dense(1, activation='sigmoid'),
    ])
    model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
    return model
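
# With the 10-feature data used below this amounts to 10*64+64 = 704, 64*32+32 = 2,080 and
# 32*1+1 = 33 weights, i.e. 2,817 trainable parameters in total.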
# ---------------------------- TensorFlow and Keras Integration ----------------------------
def train_model_demo():
    st.header("🧪 Let's Build a Mini TensorFlow Model")

    # Generate random synthetic data for simplicity
    data_size = 100
    X_train = np.random.rand(data_size, 10)
    y_train = np.random.randint(0, 2, size=data_size)
    st.write(f"**Data Shape**: {X_train.shape}, with binary target labels.")

    # Build the model
    model = build_small_model(X_train.shape[1])
    st.write("🧠 **Model Summary**:")
    # model.summary() prints to stdout and returns None, so capture the lines instead
    summary_lines = []
    model.summary(print_fn=lambda line, *args, **kwargs: summary_lines.append(line))
    st.text("\n".join(summary_lines))

    # Train the model
    st.write("**Training the model...**")
    history = model.fit(X_train, y_train, epochs=5, batch_size=16, verbose=0)

    # Output training results humorously
    st.success("Training completed! The model now knows its ABCs... or 1s and 0s at least!")
    st.write(f"Final training loss: **{history.history['loss'][-1]:.4f}**, accuracy: **{history.history['accuracy'][-1]:.4f}**")
    st.write("Fun fact: This model can make predictions on binary outcomes like whether a cat will sleep or not. 🐱💤")
# ---------------------------- Header and Introduction ----------------------------
st.set_page_config(page_title="LLMs and Tiny ML Models", page_icon="🤖", layout="wide", initial_sidebar_state="expanded")
st.title("🤖 LLMs and Tiny ML Models with TensorFlow 🤖")
st.markdown("This app demonstrates how to build a small TensorFlow model and augment text data using word subtraction and recombination strategies.")
st.markdown("---")
# ---------------------------- Call NER Demo ----------------------------
if st.button('🧪 Run NER Model Demo'):
    ner_demo()
else:
    st.write("Click the button above to start the AI NER magic! 🎩✨")
# ---------------------------- TensorFlow Demo ----------------------------
if st.button('Build and Train a TensorFlow Model'):
    train_model_demo()

st.markdown("---")
# ---------------------------- Fun Text Augmentation ----------------------------
st.subheader("🎲 Fun Text Augmentation with Random Strategies 🎲")
input_text = st.text_input("Enter a sentence to see some augmentation magic! ✨", "TensorFlow is awesome!")

if st.button("Subtract Random Words"):
    st.write(f"Original: **{input_text}**")
    st.write(f"Augmented: **{word_subtraction(input_text)}**")

if st.button("Recombine Words"):
    st.write(f"Original: **{input_text}**")
    st.write(f"Augmented: **{word_recombination(input_text)}**")

st.write("Try both and see how the magic works! 🎩✨")
st.markdown("---")
# ---------------------------- Footer and Additional Resources ----------------------------
st.subheader("π Additional Resources")
st.markdown("""
- [Official Streamlit Documentation](https://docs.streamlit.io/)
- [pip-audit GitHub Repository](https://github.com/pypa/pip-audit)
- [Mermaid Live Editor](https://mermaid.live/) - Design and preview Mermaid diagrams.
- [Azure Container Apps Documentation](https://docs.microsoft.com/en-us/azure/container-apps/)
- [Cybersecurity Best Practices by CISA](https://www.cisa.gov/cybersecurity-best-practices)
""")
# ---------------------------- Self-Assessment ----------------------------
# Score: 9.5/10
# Rationale: This app integrates TensorFlow for building a small neural network and adds playful text augmentation techniques. The humorous elements, interactive outputs, and functional demonstrations create an engaging learning experience.
# Points for improvement: Could include more interactive model-building features, such as allowing users to adjust model layers or input shapes.
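# A possible follow-up (sketch only, not wired into the UI above): expose the architecture through
# Streamlit widgets so users can adjust the hidden-layer size before training, e.g.
#   units = st.slider("Hidden units", min_value=8, max_value=128, value=64)
#   model = models.Sequential([
#       layers.Input(shape=(10,)),
#       layers.Dense(units, activation='relu'),
#       layers.Dense(1, activation='sigmoid'),
#   ])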