from typing import Optional

from transformers import PretrainedConfig


class FlowformerConfig(PretrainedConfig):
    """Configuration for the Flowformer model.

    Stores the model hyperparameters together with the list of marker
    channels; each marker corresponds to one input dimension.
    """

    def __init__(
        self,
        dim_hidden: int = 32,
        num_heads: int = 4,
        num_inds: int = 16,
        hidden_layers: int = 3,
        layer_norm: bool = True,
        dim_input: int = 11,
        markers: Optional[list] = None,
        **kwargs,
    ):
        # Build the default marker list here rather than using a mutable default argument.
        if markers is None:
            markers = ["TIME", "FSC-A", "FSC-W", "SSC-A", "CD20", "CD10",
                       "CD45", "CD34", "CD19", "CD38", "SY41"]
        assert dim_input == len(markers), "dim_input must be equal to the number of markers"

        self.dim_hidden = dim_hidden
        self.num_heads = num_heads
        self.num_inds = num_inds
        self.hidden_layers = hidden_layers
        self.layer_norm = layer_norm
        self.dim_input = dim_input
        self.markers = markers
        super().__init__(**kwargs)
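

if __name__ == "__main__":
    # Usage sketch: instantiate the config with its defaults, and with a
    # hypothetical reduced marker panel to illustrate that dim_input must
    # equal len(markers). The reduced panel below is illustrative only.
    default_config = FlowformerConfig()
    print(default_config.dim_input, default_config.markers)

    custom_config = FlowformerConfig(
        dim_input=4,
        markers=["TIME", "FSC-A", "SSC-A", "CD45"],
        dim_hidden=64,
    )
    # PretrainedConfig provides JSON serialization out of the box.
    print(custom_config.to_json_string())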