[rebuild] Deploy complete pattern analysis implementation
- Dockerfile +0 -0
- app.py +0 -0
- indicator_chart_generator.py +0 -22
- test_charts.py +0 -34
- test_data.py +0 -23
- test_imports.py +0 -29
- test_indicators.py +0 -28
- test_model.py +0 -34
- train.py +0 -29
- trigger_handler.py +0 -21
Dockerfile
CHANGED
Binary files a/Dockerfile and b/Dockerfile differ
app.py
CHANGED
Binary files a/app.py and b/app.py differ
indicator_chart_generator.py
DELETED
@@ -1,22 +0,0 @@
-from indicator_analyzer import IndicatorAnalyzer
-from lightweight_charts import Chart
-
-class AutoIndicatorGenerator:
-    def __init__(self):
-        self.analyzer = IndicatorAnalyzer()
-
-    def generate_indicator_charts(self, ohlcv_data):
-        indicators = self.analyzer.analyze_indicators(ohlcv_data)
-
-        charts = []
-        for indicator in indicators:
-            chart = Chart()
-            chart.candlestick(ohlcv_data)
-            chart.add_indicator(
-                type=indicator['type'],
-                values=indicator['values'],
-                parameters=indicator['parameters']
-            )
-            charts.append(chart)
-
-        return charts
test_charts.py
DELETED
@@ -1,34 +0,0 @@
-import numpy as np
-import pandas as pd
-from auto_chart_generator import AutoChartGenerator
-from indicator_chart_generator import AutoIndicatorGenerator
-
-# Generate 150 days of OHLCV data
-np.random.seed(42)
-days = 150
-base_price = 100
-
-price_changes = np.random.normal(0.001, 0.02, days).cumsum()
-prices = base_price * (1 + price_changes)
-
-test_data = {
-    'open': prices * (1 + np.random.normal(0, 0.005, days)),
-    'high': prices * (1 + np.random.normal(0.01, 0.008, days)),
-    'low': prices * (1 + np.random.normal(-0.01, 0.008, days)),
-    'close': prices * (1 + np.random.normal(0, 0.005, days)),
-    'volume': np.random.normal(1000000, 200000, days)
-}
-
-df = pd.DataFrame(test_data)
-df['high'] = df[['open', 'high', 'close']].max(axis=1)
-df['low'] = df[['open', 'low', 'close']].min(axis=1)
-
-# Test pattern charts
-pattern_gen = AutoChartGenerator()
-pattern_charts = pattern_gen.generate_pattern_charts(df)
-print("Generated Pattern Charts:", len(pattern_charts))
-
-# Test indicator charts
-indicator_gen = AutoIndicatorGenerator()
-indicator_charts = indicator_gen.generate_indicator_charts(df)
-print("Generated Indicator Charts:", len(indicator_charts))
test_data.py
DELETED
@@ -1,23 +0,0 @@
-import numpy as np
-import pandas as pd
-
-# Generate 150 days test data
-days = 150
-base_price = 100
-
-# Generate price changes with realistic volatility
-price_changes = np.random.normal(0.001, 0.02, days).cumsum()
-base_prices = base_price * (1 + price_changes)
-
-# Create DataFrame with all OHLCV components
-test_data = pd.DataFrame({
-    'open': base_prices * (1 + np.random.normal(0, 0.005, days)),    # Opening prices
-    'high': base_prices * (1 + np.random.normal(0.01, 0.008, days)),  # Day's high prices
-    'low': base_prices * (1 + np.random.normal(-0.01, 0.008, days)),  # Day's low prices
-    'close': base_prices * (1 + np.random.normal(0, 0.005, days)),    # Closing prices
-    'volume': np.random.normal(1000000, 200000, days).astype(int)     # Daily volume
-})
-
-# Ensure high is always highest and low is always lowest
-test_data['high'] = test_data[['open', 'high', 'close']].max(axis=1)
-test_data['low'] = test_data[['open', 'low', 'close']].min(axis=1)
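
Note that this synthetic-OHLCV generation is repeated almost verbatim in test_charts.py, test_data.py, test_indicators.py, and test_model.py. A hypothetical shared helper is one way the scripts could have stayed in sync; make_ohlcv and its parameters are illustrative and not part of the removed files:

import numpy as np
import pandas as pd

def make_ohlcv(days=150, base_price=100, seed=42):
    # Random-walk close prices with mild drift, mirroring the removed test scripts.
    rng = np.random.default_rng(seed)
    price_changes = rng.normal(0.001, 0.02, days).cumsum()
    prices = base_price * (1 + price_changes)

    df = pd.DataFrame({
        'open': prices * (1 + rng.normal(0, 0.005, days)),
        'high': prices * (1 + rng.normal(0.01, 0.008, days)),
        'low': prices * (1 + rng.normal(-0.01, 0.008, days)),
        'close': prices * (1 + rng.normal(0, 0.005, days)),
        'volume': rng.normal(1_000_000, 200_000, days).astype(int),
    })
    # Keep OHLC internally consistent, as the test scripts do.
    df['high'] = df[['open', 'high', 'close']].max(axis=1)
    df['low'] = df[['open', 'low', 'close']].min(axis=1)
    return df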
test_imports.py
DELETED
@@ -1,29 +0,0 @@
-try:
-    import gradio as gr
-    print("✅ Gradio imported successfully")
-except:
-    print("❌ Gradio import failed")
-
-try:
-    from transformers import AutoModelForCausalLM, AutoTokenizer
-    print("✅ Transformers imported successfully")
-except:
-    print("❌ Transformers import failed")
-
-try:
-    from lightweight_charts import Chart
-    print("✅ Lightweight Charts imported successfully")
-except:
-    print("❌ Lightweight Charts import failed")
-
-try:
-    from fastapi import FastAPI
-    print("✅ FastAPI imported successfully")
-except:
-    print("❌ FastAPI import failed")
-
-try:
-    from datasets import Dataset, load_dataset
-    print("✅ Datasets imported successfully")
-except:
-    print("❌ Datasets import failed")
test_indicators.py
DELETED
@@ -1,28 +0,0 @@
-import numpy as np
-import pandas as pd
-from indicator_analyzer import IndicatorAnalyzer
-
-# Generate 150 days of OHLCV data
-np.random.seed(42)
-days = 150
-base_price = 100
-
-price_changes = np.random.normal(0.001, 0.02, days).cumsum()
-prices = base_price * (1 + price_changes)
-
-test_data = {
-    'open': prices * (1 + np.random.normal(0, 0.005, days)),
-    'high': prices * (1 + np.random.normal(0.01, 0.008, days)),
-    'low': prices * (1 + np.random.normal(-0.01, 0.008, days)),
-    'close': prices * (1 + np.random.normal(0, 0.005, days)),
-    'volume': np.random.normal(1000000, 200000, days)
-}
-
-df = pd.DataFrame(test_data)
-df['high'] = df[['open', 'high', 'close']].max(axis=1)
-df['low'] = df[['open', 'low', 'close']].min(axis=1)
-
-# Test indicator analysis
-analyzer = IndicatorAnalyzer()
-indicators = analyzer.analyze_indicators(df)
-print("Generated Indicators:", indicators)
test_model.py
DELETED
@@ -1,34 +0,0 @@
-from transformers import AutoModelForCausalLM, AutoTokenizer
-import numpy as np
-import pandas as pd
-
-# Load model
-model = AutoModelForCausalLM.from_pretrained("codellama/CodeLlama-7b-hf")
-tokenizer = AutoTokenizer.from_pretrained("codellama/CodeLlama-7b-hf")
-
-# Create test data
-days = 150
-base_price = 100
-price_changes = np.random.normal(0.001, 0.02, days).cumsum()
-prices = base_price * (1 + price_changes)
-
-test_data = pd.DataFrame({
-    'open': prices * (1 + np.random.normal(0, 0.005, days)),
-    'high': prices * (1 + np.random.normal(0.01, 0.008, days)),
-    'low': prices * (1 + np.random.normal(-0.01, 0.008, days)),
-    'close': prices * (1 + np.random.normal(0, 0.005, days)),
-    'volume': np.random.normal(1000000, 200000, days)
-})
-
-# Test pattern detection
-prompt = f"""
-Analyze this OHLCV data and detect patterns:
-{test_data.head().to_string()}
-Return: Pattern type and coordinates
-"""
-
-inputs = tokenizer(prompt, return_tensors="pt")
-outputs = model.generate(**inputs, max_length=500)
-result = tokenizer.decode(outputs[0])
-
-print("Model Output:", result)
train.py
DELETED
@@ -1,29 +0,0 @@
-from transformers import Trainer, TrainingArguments
-from datasets import Dataset
-
-def prepare_training_data():
-    # Training data structure
-    return {
-        'pattern_type': ['channel', 'triangle'],
-        'chart_code': ['// Channel code', '// Triangle code']
-    }
-
-def train_model():
-    # Create dataset
-    data = prepare_training_data()
-    dataset = Dataset.from_dict(data)
-
-    training_args = TrainingArguments(
-        output_dir="./results",
-        num_train_epochs=3,
-        per_device_train_batch_size=8,
-        save_steps=500,
-    )
-
-    trainer = Trainer(
-        model=model,
-        args=training_args,
-        train_dataset=dataset,
-    )
-
-    trainer.train()
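
As written, train_model() references a model variable that is never defined in this file, and Trainer cannot consume raw strings: the examples need to be tokenized with labels first. A sketch of the missing pieces, assuming the same CodeLlama checkpoint used in test_model.py; the text format in tokenize is purely illustrative:

from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumed checkpoint, matching test_model.py; not taken from the removed train.py.
tokenizer = AutoTokenizer.from_pretrained("codellama/CodeLlama-7b-hf")
model = AutoModelForCausalLM.from_pretrained("codellama/CodeLlama-7b-hf")

def tokenize(example):
    # Fold the pattern label and its chart code into one training text.
    text = f"Pattern: {example['pattern_type']}\n{example['chart_code']}"
    tokens = tokenizer(text, truncation=True, padding="max_length", max_length=512)
    # For causal-LM fine-tuning, the labels are the input ids themselves.
    tokens["labels"] = tokens["input_ids"].copy()
    return tokens

tokenized_dataset = dataset.map(tokenize, remove_columns=dataset.column_names)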
trigger_handler.py
DELETED
@@ -1,21 +0,0 @@
-from chart_maker import ChartMaker
-from fastapi import FastAPI
-
-
-class TriggerHandler:
-    def __init__(self):
-        self.chart_maker = ChartMaker()
-
-    async def handle_chart_trigger(self, trigger_data):
-        # Extract chart image and OHLCV data
-        chart_image = trigger_data['image']
-        ohlcv_data = trigger_data['ohlcv']
-
-        # Generate pattern charts
-        pattern_charts = self.chart_maker.generate_all_variations(ohlcv_data)
-
-        # Return generated charts through API
-        return {
-            'pattern_charts': pattern_charts,
-            'timestamp': trigger_data['timestamp']
-        }
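
FastAPI is imported here but no app or route is defined in the removed file; the wiring presumably lived elsewhere (app.py changed as a binary blob in this commit, so it cannot be inspected). A minimal sketch of how such a handler might be exposed, with the route path and payload shape as assumptions:

from fastapi import FastAPI

app = FastAPI()
handler = TriggerHandler()

@app.post("/chart-trigger")  # route name is illustrative, not from the removed code
async def chart_trigger(trigger_data: dict):
    # Expects the same keys the handler reads: 'image', 'ohlcv', 'timestamp'.
    return await handler.handle_chart_trigger(trigger_data)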