Update data embed from token embed (#34)
* Update dataembed from tokenembed

* Update dataembed from tokenembed
LongxingTan authored Jun 14, 2023
1 parent 1e3651a commit b6a6faa
Showing 3 changed files with 5 additions and 5 deletions.
2 changes: 1 addition & 1 deletion examples/run_prediction.py
@@ -24,7 +24,7 @@ def parse_args():
parser.add_argument("--use_data", type=str, default="sine", help="dataset: sine or airpassengers")
parser.add_argument("--train_length", type=int, default=24, help="sequence length for train")
parser.add_argument("--predict_length", type=int, default=12, help="sequence length for predict")
parser.add_argument("--epochs", type=int, default=50, help="Number of training epochs")
parser.add_argument("--epochs", type=int, default=100, help="Number of training epochs")
parser.add_argument("--batch_size", type=int, default=16, help="Batch size for training")
parser.add_argument("--learning_rate", type=float, default=1e-4, help="learning rate for training")

4 changes: 2 additions & 2 deletions tfts/models/informer.py
@@ -49,8 +49,8 @@ def __init__(
         params.update(custom_model_params)
         self.params = params
         self.predict_sequence_length = predict_sequence_length
-        self.encoder_embedding = TokenEmbedding(params["attention_hidden_sizes"])
-        self.decoder_embedding = TokenEmbedding(params["attention_hidden_sizes"])
+        self.encoder_embedding = DataEmbedding(params["attention_hidden_sizes"])
+        self.decoder_embedding = DataEmbedding(params["attention_hidden_sizes"])
         if not params["prob_attention"]:
             attn_layer = FullAttention(
                 params["attention_hidden_sizes"], params["num_heads"], params["attention_dropout"]
4 changes: 2 additions & 2 deletions tfts/models/transformer.py
@@ -51,7 +51,7 @@ def __init__(
         params.update(custom_model_params)
         self.params = params
         self.predict_sequence_length = predict_sequence_length
-        self.encoder_embedding = TokenEmbedding(params["attention_hidden_sizes"])
+        self.encoder_embedding = DataEmbedding(params["attention_hidden_sizes"])

         self.encoder = Encoder(
             params["n_encoder_layers"],
@@ -64,7 +64,7 @@
         )

         self.decoder = Decoder2(
-            embed_layer=TokenEmbedding(params["attention_hidden_sizes"]),
+            embed_layer=DataEmbedding(params["attention_hidden_sizes"]),
             att_layers=[
                 DecoderLayer2(
                     params["n_decoder_layers"],
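The substance of this commit is swapping TokenEmbedding for DataEmbedding at the encoder and decoder inputs of the Informer and Transformer models. In Informer-style architectures, a token embedding is typically just a projection of raw values into the model dimension, while a data embedding also adds positional information, so the order-agnostic attention layers can tell time steps apart. Below is a minimal TensorFlow sketch of that relationship; the layer definitions are hypothetical stand-ins, and the actual tfts layers may differ (for example, by using a 1-D convolution for the value projection or accepting extra arguments).

    import numpy as np
    import tensorflow as tf


    class TokenEmbedding(tf.keras.layers.Layer):
        """Hypothetical sketch: project raw values into the model dimension."""

        def __init__(self, embed_size):
            super().__init__()
            self.proj = tf.keras.layers.Dense(embed_size)  # real layer may use Conv1D

        def call(self, x):
            # x: (batch, time, features) -> (batch, time, embed_size)
            return self.proj(x)


    class DataEmbedding(tf.keras.layers.Layer):
        """Hypothetical sketch: token embedding plus sinusoidal positions."""

        def __init__(self, embed_size):
            super().__init__()
            self.embed_size = embed_size
            self.value_embedding = TokenEmbedding(embed_size)

        def positional_encoding(self, seq_len):
            # Standard sinusoidal encoding, shape (seq_len, embed_size).
            pos = np.arange(seq_len)[:, None].astype(np.float64)
            dim = np.arange(self.embed_size)[None, :]
            angles = pos / np.power(10000.0, (2 * (dim // 2)) / self.embed_size)
            angles[:, 0::2] = np.sin(angles[:, 0::2])
            angles[:, 1::2] = np.cos(angles[:, 1::2])
            return tf.constant(angles, dtype=tf.float32)

        def call(self, x):
            # Broadcast (seq_len, embed_size) positions over the batch dimension.
            return self.value_embedding(x) + self.positional_encoding(x.shape[1])


    # Embed a batch of 16 series, 24 steps, 1 feature, into a 128-dim space.
    out = DataEmbedding(128)(tf.random.normal((16, 24, 1)))
    print(out.shape)  # (16, 24, 128)

Since both models feed their attention stacks directly from these embeddings, the same swap is applied in three places: the Informer encoder and decoder embeddings, and the Transformer encoder and Decoder2 embed_layer.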
