# Neurasense / app.py
import streamlit as st
from typing import List, Tuple
from transformers import pipeline
import matplotlib.pyplot as plt
# Constants
WIDTH, HEIGHT = 600, 600  # simulated canvas size
ELASTICITY = 0.3          # spring stiffness pulling points back to rest
DAMPING = 0.7             # per-frame velocity damping factor
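# The touch surface is modeled as a grid of points on independent damped springs.
# Each frame (see update_points below) applies:
#   v += (p_rest - p) * ELASTICITY   # spring force toward the rest position
#   v *= DAMPING                     # friction-like damping
#   p += v                           # explicit position update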
# Create a grid of touch points
num_points = 20
step = (WIDTH - 100) // (num_points - 1)
touch_points: List[Tuple[float, float]] = [
    (x, y) for x in range(50, WIDTH - 50, step) for y in range(50, HEIGHT - 50, step)
]
original_points = touch_points.copy()
velocities: List[Tuple[float, float]] = [(0.0, 0.0)] * len(touch_points)
is_affected: List[bool] = [False] * len(touch_points)
# Set up the Hugging Face pipeline (cached so the GPT-2 model loads only once per session)
@st.cache_resource
def load_model():
    return pipeline('text-generation', model='gpt2')

text_generator = load_model()
# Streamlit app
st.title("Artificial Touch Simulation")
# Create a Streamlit container for the touch simulation
touch_container = st.container()
def update_points():
    global touch_points, velocities, is_affected
    # Apply spring force pulling each point back toward its rest position
    for i, (x, y) in enumerate(touch_points):
        force_x = (original_points[i][0] - x) * ELASTICITY
        force_y = (original_points[i][1] - y) * ELASTICITY
        velocities[i] = (velocities[i][0] + force_x, velocities[i][1] + force_y)
    # Apply damping
    for i, (vx, vy) in enumerate(velocities):
        velocities[i] = (vx * DAMPING, vy * DAMPING)
    # Update positions
    for i, (x, y) in enumerate(touch_points):
        vx, vy = velocities[i]
        touch_points[i] = (x + vx, y + vy)
    # Reset affected flags
    is_affected = [False] * len(touch_points)
def on_tap(x, y):
    global touch_points, velocities, is_affected
    for i, (tx, ty) in enumerate(touch_points):
        distance = ((tx - x) ** 2 + (ty - y) ** 2) ** 0.5
        # Only affect nearby points; the lower bound guards against division by zero
        if 0 < distance < 30:
            force_x = (tx - x) / distance
            force_y = (ty - y) / distance
            velocities[i] = (velocities[i][0] - force_x * 10, velocities[i][1] - force_y * 10)
            is_affected[i] = True
    # Generate a description of the touch
    st.write(f"Touch at ({x:.2f}, {y:.2f})")
    text = text_generator(
        f"The user touched the screen at ({x:.2f}, {y:.2f}).",
        max_length=100,
        num_return_sequences=1,
        do_sample=True,
        top_k=50,
        top_p=0.95,
        num_beams=1,
    )[0]['generated_text']
    st.write(text)
    update_points()
# Initialize session state
if 'x' not in st.session_state:
    st.session_state.x = 0
if 'y' not in st.session_state:
    st.session_state.y = 0
# Main app logic
fig, ax = plt.subplots(figsize=(6, 6))
ax.set_xlim(0, WIDTH)
ax.set_ylim(0, HEIGHT)
for i, (x, y) in enumerate(touch_points):
    color = "red" if is_affected[i] else "navy"
    ax.add_artist(plt.Circle((x, y), 5, color=color, alpha=0.5))
touch_container.pyplot(fig)
if touch_container.button("Tap the screen"):
    on_tap(st.session_state.x, st.session_state.y)
st.session_state.x = st.slider("X coordinate", 0, WIDTH, st.session_state.x)
st.session_state.y = st.slider("Y coordinate", 0, HEIGHT, st.session_state.y)
update_points()
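# To run locally (assuming streamlit, transformers, and matplotlib are installed):
#   streamlit run app.py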