# Neurasense / app.py
import streamlit as st
import numpy as np
from typing import List, Tuple
from transformers import pipeline
import matplotlib.pyplot as plt
from matplotlib.backends.backend_agg import RendererAgg
from streamlit_drawable_canvas import st_canvas
import time
import io
from PIL import Image
# Constants
WIDTH, HEIGHT = 600, 600
ELASTICITY = 0.3
DAMPING = 0.7
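# NOTE: ELASTICITY and DAMPING are defined here but not referenced anywhere else in this script.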
# Create sensation map
sensation_map = np.zeros((HEIGHT, WIDTH))
for y in range(HEIGHT):
    for x in range(WIDTH):
        # Create a complex sensation map with various regions
        sensation_map[y, x] = np.sin(x/30) * np.cos(y/30) * 5 + np.random.normal(0, 0.5)
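# The map layers a smooth sinusoidal pattern (roughly in [-5, 5]) with Gaussian noise (sigma = 0.5).
# A minimal vectorized sketch that builds the same map without the Python double loop:
#   xs, ys = np.meshgrid(np.arange(WIDTH), np.arange(HEIGHT))
#   sensation_map = np.sin(xs / 30) * np.cos(ys / 30) * 5 + np.random.normal(0, 0.5, (HEIGHT, WIDTH))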
# Set up the Hugging Face pipeline
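# st.cache_resource keeps a single pipeline instance across Streamlit reruns, so the GPT-2 weights load only once.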
@st.cache_resource
def load_model():
    return pipeline('text-generation', model='gpt2')
text_generator = load_model()
# Streamlit app
st.title("Advanced Artificial Touch Simulation")
# Create a Streamlit container for the touch simulation
touch_container = st.container()
def calculate_sensation(x, y, pressure, duration):
    # Clip coordinates so touches at the canvas edge stay inside the map
    xi = int(np.clip(x, 0, WIDTH - 1))
    yi = int(np.clip(y, 0, HEIGHT - 1))
    # Get sensation from the map
    base_sensation = sensation_map[yi, xi]
    # Modify sensation based on pressure and duration
    modified_sensation = base_sensation * pressure * (1 + np.log(duration + 1))
    return modified_sensation
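# Worked example: base_sensation = 2.0, pressure = 1.5, duration = 3 s
# -> 2.0 * 1.5 * (1 + ln(4)) ≈ 7.2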
def on_touch(x, y, pressure, duration):
    sensation = calculate_sensation(x, y, pressure, duration)
    # Generate a description of the touch
    st.write(f"Touch at ({x:.2f}, {y:.2f}) with pressure {pressure:.2f} for {duration:.2f} seconds")
    st.write(f"Sensation: {sensation:.2f}")
    prompt = f"The user touched the screen at ({x:.2f}, {y:.2f}) with a pressure of {pressure:.2f} for {duration:.2f} seconds, resulting in a sensation of {sensation:.2f}. Describe the experience:"
    text = text_generator(prompt, max_length=100, num_return_sequences=1, do_sample=True, top_k=50, top_p=0.95, num_beams=1)[0]['generated_text']
    st.write(text)
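# Sampling (do_sample with top_k/top_p) gives a different description on each touch;
# note that max_length counts the prompt tokens plus the generated continuation.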
# Initialize session state
if 'touch_start_time' not in st.session_state:
    st.session_state.touch_start_time = None
if 'last_touch_position' not in st.session_state:
    st.session_state.last_touch_position = None
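# st.session_state persists these values across reruns, which is how a single "touch" is
# tracked over successive canvas updates.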
# Main app logic
fig, ax = plt.subplots(figsize=(6, 6))
ax.imshow(sensation_map, cmap='coolwarm', extent=[0, WIDTH, HEIGHT, 0])
ax.axis('off')
# st_canvas expects a PIL image for background_image, so render the matplotlib
# figure to an in-memory PNG first (passing the Figure object directly fails).
buf = io.BytesIO()
fig.savefig(buf, format="png", bbox_inches="tight", pad_inches=0)
buf.seek(0)
background = Image.open(buf)
plt.close(fig)  # the figure is no longer needed once rendered
# Use streamlit-drawable-canvas for interaction
canvas_result = st_canvas(
    fill_color="rgba(255, 165, 0, 0.3)",
    stroke_width=3,
    stroke_color="#e00",
    background_color="#eee",
    background_image=background,
    update_streamlit=True,
    height=HEIGHT,
    width=WIDTH,
    drawing_mode="point",
    point_display_radius=0,
    key="canvas",
)
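# canvas_result.json_data mirrors the Fabric.js canvas state; each drawn point appears in
# "objects" with its pixel coordinates under "left" and "top".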
# Handle touch events
if canvas_result.json_data is not None:
    objects = canvas_result.json_data["objects"]
    if len(objects) > 0:
        last_object = objects[-1]
        current_position = (last_object["left"], last_object["top"])
        if st.session_state.touch_start_time is None:
            st.session_state.touch_start_time = time.time()
            st.session_state.last_touch_position = current_position
        else:
            # Calculate pressure based on movement
            if st.session_state.last_touch_position is not None:
                dx = current_position[0] - st.session_state.last_touch_position[0]
                dy = current_position[1] - st.session_state.last_touch_position[1]
                distance = np.sqrt(dx**2 + dy**2)
                pressure = 1 + distance / 10  # Adjust this formula as needed
            else:
                pressure = 1.0
            duration = time.time() - st.session_state.touch_start_time
            on_touch(current_position[0], current_position[1], pressure, duration)
            st.session_state.last_touch_position = current_position
    else:
        st.session_state.touch_start_time = None
        st.session_state.last_touch_position = None
st.write("Click and drag on the image to simulate touch. The color represents different sensations.")
st.write("Red areas are more sensitive (pain or intense pleasure), while blue areas are less sensitive.")