File size: 1,664 Bytes
03ce4c5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
import gradio as gr
import torch
import cv2
import numpy as np
import pandas as pd
from scipy.signal import find_peaks, savgol_filter
from collections import Counter
from tqdm import tqdm
import time
import os
import torch
import torch.nn as nn
import torch.fft as fft
import xgboost as xgb
from torch.utils.data import DataLoader, TensorDataset
import time

# Define the TCN model
class TCN(nn.Module):
    """A small temporal convolutional network for sequence classification.

    Stacks ``num_layers`` Conv1d layers (kernel_size=2, padding=1), each
    followed by ReLU and dropout, then global-average-pools over the time
    axis and projects the pooled features to ``output_size`` logits.

    NOTE(review): with kernel_size=2 and padding=1 each conv pads *both*
    ends, so every layer lengthens the sequence by one step and the zero
    padding leaks into the global average. Harmless for pooling-based
    classification, but these are not causal convolutions in the strict
    TCN sense — confirm this is intended before reuse elsewhere.

    Args:
        input_size: number of input features per timestep.
        hidden_size: channel width of every convolutional layer.
        output_size: dimensionality of the final linear projection.
        num_layers: number of stacked Conv1d layers (default 3).
        dropout: dropout probability applied after each conv; forced to 0
            when ``num_layers == 1`` (mirrors the ``nn.LSTM`` convention).
    """

    def __init__(self, input_size, hidden_size, output_size, num_layers=3, dropout=0.1):
        super().__init__()

        # Registration order matches the reference implementation
        # (convs, dropout, fc) so state_dict layout and seeded parameter
        # initialization are unchanged.
        self.convs = nn.ModuleList()
        # Single-layer stacks get no dropout.
        self.dropout = nn.Dropout(dropout if num_layers > 1 else 0)

        # First layer maps input_size -> hidden_size; the rest keep the
        # hidden width.
        channel_plan = [input_size] + [hidden_size] * num_layers
        for c_in, c_out in zip(channel_plan[:-1], channel_plan[1:]):
            self.convs.append(nn.Conv1d(c_in, c_out, kernel_size=2, padding=1))

        # Linear head applied to the temporally pooled features.
        self.fc = nn.Linear(hidden_size, output_size)

    def forward(self, x):
        """Map ``(batch, timesteps, features)`` to ``(batch, output_size)``."""
        # Conv1d expects channels-first input: (batch, features, timesteps).
        h = x.permute(0, 2, 1)

        # Conv -> ReLU -> dropout, repeated for each layer.
        for conv in self.convs:
            h = self.dropout(torch.relu(conv(h)))

        # Average over the temporal axis, then project to the output space.
        return self.fc(h.mean(dim=2))