awacke1 committed on
Commit
dc0e4d1
·
1 Parent(s): ccd94cc

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +181 -0
app.py ADDED
@@ -0,0 +1,181 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# app.py — a documentation page rendered with Streamlit.
#
# The whole page is one markdown string passed to st.markdown(). It shows two
# reference container setups for Hugging Face Spaces:
#   1. a Streamlit Docker example (Dockerfile + demo app.py + requirements.txt)
#   2. a Gradio Docker example (Dockerfile + main.py + requirements.txt)
# Everything inside the fenced blocks below is display text only — none of the
# embedded Dockerfile/Python snippets is executed by this script.
#
# NOTE(review): indentation inside the embedded python snippets was restored
# here (the scraped source had flattened it), and a stray unmatched ``` fence
# near the end of the original string was removed so the markdown renders with
# balanced code fences.
import streamlit as st

st.markdown("""

```
https://huggingface.co/spaces/DockerTemplates/streamlit-docker-example/tree/main

# Dockerfile:
FROM python:3.8.9

WORKDIR /app

COPY ./requirements.txt /app/requirements.txt
COPY ./packages.txt /app/packages.txt

RUN apt-get update && xargs -r -a /app/packages.txt apt-get install -y && rm -rf /var/lib/apt/lists/*
RUN pip3 install --no-cache-dir -r /app/requirements.txt

# User
RUN useradd -m -u 1000 user
USER user
ENV HOME /home/user
ENV PATH $HOME/.local/bin:$PATH

WORKDIR $HOME
RUN mkdir app
WORKDIR $HOME/app
COPY . $HOME/app

EXPOSE 8501
CMD streamlit run app.py

# app.py:

import streamlit as st
import pandas as pd
import numpy as np

st.title('Uber pickups in NYC')

DATE_COLUMN = 'date/time'
DATA_URL = ('https://s3-us-west-2.amazonaws.com/'
            'streamlit-demo-data/uber-raw-data-sep14.csv.gz')

@st.cache
def load_data(nrows):
    data = pd.read_csv(DATA_URL, nrows=nrows)
    lowercase = lambda x: str(x).lower()
    data.rename(lowercase, axis='columns', inplace=True)
    data[DATE_COLUMN] = pd.to_datetime(data[DATE_COLUMN])
    return data

data_load_state = st.text('Loading data...')
data = load_data(10000)
data_load_state.text("Done! (using st.cache)")

if st.checkbox('Show raw data'):
    st.subheader('Raw data')
    st.write(data)

st.subheader('Number of pickups by hour')
hist_values = np.histogram(data[DATE_COLUMN].dt.hour, bins=24, range=(0,24))[0]
st.bar_chart(hist_values)

# Some number in the range 0-23
hour_to_filter = st.slider('hour', 0, 23, 17)
filtered_data = data[data[DATE_COLUMN].dt.hour == hour_to_filter]

st.subheader('Map of all pickups at %s:00' % hour_to_filter)
st.map(filtered_data)

# requirements.txt
streamlit
numpy
pandas
```

# 2. Gradio Docker Example

https://huggingface.co/spaces/sayakpaul/demo-docker-gradio/blob/main/Dockerfile

```
# Dockerfile:

# read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
# you will also find guides on how best to write your Dockerfile

FROM python:3.9

WORKDIR /code

COPY ./requirements.txt /code/requirements.txt

RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

# Set up a new user named "user" with user ID 1000
RUN useradd -m -u 1000 user
# Switch to the "user" user
USER user
# Set home to the user's home directory
ENV HOME=/home/user \\
    PATH=/home/user/.local/bin:$PATH

# Set the working directory to the user's home directory
WORKDIR $HOME/app

# Copy the current directory contents into the container at $HOME/app setting the owner to the user
COPY --chown=user . $HOME/app

CMD ["python", "main.py"]


# main.py

import gradio as gr
import torch
import requests
from torchvision import transforms

model = torch.hub.load("pytorch/vision:v0.6.0", "resnet18", pretrained=True).eval()
response = requests.get("https://git.io/JJkYN")
labels = response.text.split("\\n")


def predict(inp):
    inp = transforms.ToTensor()(inp).unsqueeze(0)
    with torch.no_grad():
        prediction = torch.nn.functional.softmax(model(inp)[0], dim=0)
        confidences = {labels[i]: float(prediction[i]) for i in range(1000)}
    return confidences


def run():
    demo = gr.Interface(
        fn=predict,
        inputs=gr.inputs.Image(type="pil"),
        outputs=gr.outputs.Label(num_top_classes=3),
    )

    demo.launch(server_name="0.0.0.0", server_port=7860)


if __name__ == "__main__":
    run()

# requirements.txt

gradio
torch
torchvision
requests

```
""")