Commit a35f5c0

adapt models to new preprocessor

AnFreTh committed Dec 3, 2024
1 parent c6b266f commit a35f5c0
Showing 7 changed files with 18 additions and 25 deletions.
8 changes: 8 additions & 0 deletions mambular/base_models/__init__.py
@@ -7,6 +7,10 @@
 from .tabtransformer import TabTransformer
 from .mambatab import MambaTab
 from .mambattn import MambAttn
+from .cnn import CNN
+from .node import NODE
+from .trem import TREM
+from .tabm import TabM
 
 __all__ = [
     "TaskModel",
@@ -18,4 +22,8 @@
     "BaseModel",
     "MambaTab",
     "MambAttn",
+    "CNN",
+    "TabM",
+    "NODE",
+    "TREM",
 ]
2 changes: 0 additions & 2 deletions mambular/base_models/mambatab.py
@@ -84,8 +84,6 @@ def __init__(
 
         self.axis = config.axis
 
-        head_activation = self.hparams.head_activation
-
         self.tabular_head = MLPhead(
             input_dim=self.hparams.d_model,
             config=config,
7 changes: 2 additions & 5 deletions mambular/base_models/mlp.py
@@ -3,6 +3,7 @@
 from ..configs.mlp_config import DefaultMLPConfig
 from .basemodel import BaseModel
 from ..arch_utils.layer_utils.embedding_layer import EmbeddingLayer
+from ..utils.get_feature_dimensions import get_feature_dimensions
 
 
 class MLP(BaseModel):
@@ -73,11 +74,7 @@ def __init__(
         # Initialize layers
         self.layers = nn.ModuleList()
 
-        input_dim = 0
-        for feature_name, input_shape in num_feature_info.items():
-            input_dim += input_shape
-        for feature_name, input_shape in cat_feature_info.items():
-            input_dim += 1
+        input_dim = get_feature_dimensions(num_feature_info, cat_feature_info)
 
         if self.hparams.use_embeddings:
             self.embedding_layer = EmbeddingLayer(
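Note: the new helper, mambular/utils/get_feature_dimensions.py, is imported here but its body is not part of this commit's diff. The inline loops it replaces in MLP, NDTF, NODE, ResNet, and TabM pin down the expected behavior: sum each numerical feature's input_shape and add one column per categorical feature. A minimal sketch under that assumption (signature inferred from the call sites; docstring and comments are ours, not from the repository):

def get_feature_dimensions(num_feature_info, cat_feature_info):
    """Flat input width for concatenated tabular features.

    Numerical features contribute their declared width (input_shape);
    each categorical feature contributes a single integer-encoded column,
    matching the inline loops this commit removes.
    """
    input_dim = 0
    for feature_name, input_shape in num_feature_info.items():
        input_dim += input_shape  # numerical: full width
    for feature_name, input_shape in cat_feature_info.items():
        input_dim += 1  # categorical: one encoded column
    return input_dim

Centralizing this arithmetic means a change in how the new preprocessor encodes categoricals only has to be reflected in one place rather than in five models.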
7 changes: 2 additions & 5 deletions mambular/base_models/ndtf.py
@@ -4,6 +4,7 @@
 from .basemodel import BaseModel
 from ..arch_utils.neural_decision_tree import NeuralDecisionTree
 import numpy as np
+from ..utils.get_feature_dimensions import get_feature_dimensions
 
 
 class NDTF(BaseModel):
@@ -66,11 +67,7 @@ def __init__(
         self.num_feature_info = num_feature_info
         self.returns_ensemble = False
 
-        input_dim = 0
-        for feature_name, input_shape in num_feature_info.items():
-            input_dim += input_shape
-        for feature_name, input_shape in cat_feature_info.items():
-            input_dim += 1
+        input_dim = get_feature_dimensions(num_feature_info, cat_feature_info)
 
         self.input_dimensions = [input_dim]
 
7 changes: 2 additions & 5 deletions mambular/base_models/node.py
@@ -4,6 +4,7 @@
 from ..arch_utils.layer_utils.embedding_layer import EmbeddingLayer
 from ..arch_utils.node_utils import DenseBlock
 from ..arch_utils.mlp_utils import MLPhead
+from ..utils.get_feature_dimensions import get_feature_dimensions
 
 
 class NODE(BaseModel):
@@ -75,11 +76,7 @@ def __init__(
             self.embedding_layer = EmbeddingLayer(config)
 
         else:
-            input_dim = 0
-            for feature_name, input_shape in num_feature_info.items():
-                input_dim += input_shape
-            for feature_name, input_shape in cat_feature_info.items():
-                input_dim += 1
+            input_dim = get_feature_dimensions(num_feature_info, cat_feature_info)
 
             self.d_out = num_classes
             self.block = DenseBlock(
7 changes: 2 additions & 5 deletions mambular/base_models/resnet.py
@@ -5,6 +5,7 @@
 from .basemodel import BaseModel
 from ..arch_utils.resnet_utils import ResidualBlock
 from ..arch_utils.layer_utils.embedding_layer import EmbeddingLayer
+from ..utils.get_feature_dimensions import get_feature_dimensions
 
 
 class ResNet(BaseModel):
@@ -83,11 +84,7 @@ def __init__(
             )
 
         else:
-            input_dim = 0
-            for feature_name, input_shape in num_feature_info.items():
-                input_dim += input_shape
-            for feature_name, input_shape in cat_feature_info.items():
-                input_dim += 1
+            input_dim = get_feature_dimensions(num_feature_info, cat_feature_info)
 
             self.initial_layer = nn.Linear(input_dim, self.hparams.layer_sizes[0])
 
5 changes: 2 additions & 3 deletions mambular/base_models/tabm.py
@@ -6,6 +6,7 @@
 from ..arch_utils.layer_utils.embedding_layer import EmbeddingLayer
 from ..arch_utils.layer_utils.batch_ensemble_layer import LinearBatchEnsembleLayer
 from ..arch_utils.layer_utils.sn_linear import SNLinear
+from ..utils.get_feature_dimensions import get_feature_dimensions
 
 
 class TabM(BaseModel):
@@ -46,9 +47,7 @@ def __init__(
             ) * config.d_model
 
         else:
-            # Calculate input dimension
-            input_dim = sum(input_shape for input_shape in num_feature_info.values())
-            input_dim += len(cat_feature_info)
+            input_dim = get_feature_dimensions(num_feature_info, cat_feature_info)
 
             # Input layer with batch ensembling
             self.layers.append(
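For reference, a toy invocation showing the arithmetic the helper encapsulates; it matches the TabM code removed above (sum of numerical widths plus len(cat_feature_info)). The feature-info dicts below are purely illustrative, not from the repository:

num_feature_info = {"age": 1, "income": 3}    # hypothetical numerical widths
cat_feature_info = {"city": 10, "gender": 2}  # values unused; each feature adds 1
input_dim = get_feature_dimensions(num_feature_info, cat_feature_info)
print(input_dim)  # 1 + 3 + 1 + 1 = 6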
