Bryan Mildort committed
Commit 221814d · 1 Parent(s): 66599f3

initial dump

Files changed (2):
  1. requirements.txt +5 -0
  2. streamlitapp.py +91 -0
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ streamlit
+ pandas
+ numpy
+ pandas_datareader
+ scikit-learn
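
For reference, a typical way to run this commit locally (assuming a Python 3 environment with pip on the PATH, which the commit itself does not specify) is to install the listed dependencies and launch the Streamlit app:

    pip install -r requirements.txt
    streamlit run streamlitapp.py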
streamlitapp.py ADDED
@@ -0,0 +1,91 @@
+ import streamlit as st
+ import pandas as pd
+ import numpy as np
+ from pandas_datareader import data
+ from datetime import date, datetime
+ from sklearn.model_selection import train_test_split
+ from sklearn.metrics import r2_score
+
+ def dateArrange(data):  # convert the 'Date' column to epoch timestamps, in place
+     dates = data['Date'].to_list()
+     epochDates = []
+     for i in dates:
+         date_time = np.datetime64(i).astype(datetime)
+         splitDate = date_time.strftime("%Y-%m-%d").split('-')
+         epochDate = datetime(int(splitDate[0]), int(splitDate[1]), int(splitDate[2]), 0, 0).timestamp()
+         epochDates.append(epochDate)
+     data['Date'] = epochDates
+
+ def dataScraper(ticker):  # pull daily prices from Yahoo and append moving-average columns
+     startdate = '2012-01-01'
+     today = date.today().strftime("%Y-%m-%d")
+     enddate = today
+     try:
+         panel_data = data.DataReader(ticker, 'yahoo', startdate, enddate).reset_index()
+         price_close = panel_data['Adj Close']
+         price_20dma = price_close.rolling(window=20).mean().to_list()[100:]   # 20 day moving average
+         price_50dma = price_close.rolling(window=50).mean().to_list()[100:]   # 50 day
+         price_100dma = price_close.rolling(window=100).mean().to_list()[100:] # 100 day
+         panel_data = panel_data.iloc[100:, :]  # drop the warm-up rows with incomplete averages
+         panel_data['20dma'] = price_20dma
+         panel_data['50dma'] = price_50dma
+         panel_data['100dma'] = price_100dma
+         dateArrange(panel_data)
+         return panel_data
+     except Exception:
+         print("Error while scraping data")
+         return
+
+ def predictor(stock_ticker):  # fit a Lasso per target and render next-day predictions
+     prediction_list = []
+     stock_data = dataScraper(stock_ticker)
+     try:
+         stock_data.iat[0, 0]
+     except Exception:
+         print('Error with stock data')
+         return
+     from sklearn.linear_model import Lasso
+     alpha = 1.0
+     tol = 0.0008
+     max_iter = 10000
+     lasso = Lasso(alpha=alpha, max_iter=max_iter, tol=tol)
+     test_size = 0.1
+     print('\nModel = ' + str(lasso))
+     print('\n~ ' + stock_ticker.upper() + ' Next Day Price Predictions ~\n')
+     X = stock_data.iloc[:-1, :]   # features: every day except the last
+     y = {                         # targets: the following day's values
+         'High': stock_data.iloc[1:, :]['High'],
+         'Low': stock_data.iloc[1:, :]['Low'],
+         'Close (Adjusted)': stock_data.iloc[1:, :]['Close']
+     }
+     sample = stock_data.iloc[-1:, :]  # most recent day, used for the next-day prediction
+     for i in y:
+         X_train, X_test, y_train, y_test = train_test_split(X, y[i], test_size=test_size)
+         y_pred_lasso = lasso.fit(np.array(X_train), np.array(y_train))
+         r2_score_lasso = r2_score(np.array(y_test), y_pred_lasso.predict(np.array(X_test)))
+         prediction = y_pred_lasso.predict(np.array(sample))
+         prediction_list.append(prediction)
+         output = "<h5 style='text-align: center; color: #0E7600;'>" + i + ' - ' + str(prediction) + '\n' + "</h5>"
+         st.markdown(output, unsafe_allow_html=True)
+     prediction_list.append(lasso)
+     prediction_list.append(r2_score_lasso)
+     r2score = 'R2 = ' + str(r2_score_lasso)
+     print(r2score)
+
+ st.markdown("<h1 style='text-align: center; color: #2BD314;'>Stock Price Predictor v2.0</h1>", unsafe_allow_html=True)
+ st.markdown("<h6 style='text-align: center; color: #094B00;'>by Bryan Mildort</h6>", unsafe_allow_html=True)
+ ticker = st.text_input('Enter Ticker to Scrape:', placeholder='SPY')
+ col1, col2, col3, col4, col5 = st.columns(5)
+ with col1:
+     st.write(' ')
+ with col2:
+     st.write(' ')
+ with col3:
+     if st.button('Scrape!'):
+         predictor(ticker)
+ with col4:
+     st.write(' ')
+ with col5:
+     st.write(' ')
+
+
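
For readers who want the modeling step in isolation, here is a minimal, self-contained sketch of the same next-day framing that predictor() uses (features from day t, target from day t+1, same Lasso settings). The synthetic price series and the two-lag feature choice are illustrative assumptions, not part of the commit:

    import numpy as np
    from sklearn.linear_model import Lasso
    from sklearn.model_selection import train_test_split
    from sklearn.metrics import r2_score

    rng = np.random.default_rng(0)
    prices = 100 + np.cumsum(rng.normal(0, 1, 500))    # fake daily closing prices
    X = np.column_stack([prices[1:-1], prices[:-2]])   # close at day t and day t-1
    y = prices[2:]                                     # close at day t+1

    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1)
    model = Lasso(alpha=1.0, max_iter=10000, tol=0.0008).fit(X_train, y_train)
    print('R2 =', r2_score(y_test, model.predict(X_test)))
    print('Next-day estimate:', model.predict(X[-1:]))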