
Commit

Resolve #7 and #16
sigma1326 committed Sep 3, 2022
1 parent 60e75d1 commit 2b5ba69
Showing 16 changed files with 27 additions and 23 deletions.
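In short, the commit moves the top-level layers/ and txplot/ packages under the transformerx package, adds package-level re-exports in their __init__.py files, and updates the example and test imports to the new transformerx.* paths.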
3 changes: 1 addition & 2 deletions examples/eng2fr_translation.py
@@ -1,9 +1,8 @@
 # **NOTE**: This example will be heavily edited, hence, this is not an official part of the library at this time
 
 from data_loader import BaseDataset
-from layers.transformer_decoder import TransformerDecoder
-from layers.transformer_encoder import TransformerEncoder
 from training.base import Transformer, Trainer
+from transformerx.layers import TransformerEncoder, TransformerDecoder
 
 depth, n_blocks, dropout = 256, 2, 0.2
 ffn_num_hiddens, num_heads = 64, 4
Empty file removed layers/__init__.py
15 changes: 5 additions & 10 deletions test_main.py
@@ -1,17 +1,12 @@
-import os
-
 import numpy as np
 import pytest
 import tensorflow as tf
 
-from layers.addnorm import AddNorm
-from layers.multihead_attention import MultiHeadAttention
-from layers.positional_encoding import PositionalEncoding
-from layers.positionwise_ffn import PositionWiseFFN
-from layers.transformer_decoder_block import TransformerDecoderBlock
-from layers.transformer_encoder import TransformerEncoder
-from layers.transformer_encoder_block import TransformerEncoderBlock
-from txplot.plot_pe import Plot
+from transformerx.layers import (
+    MultiHeadAttention, PositionalEncoding, PositionWiseFFN, AddNorm,
+    TransformerEncoderBlock, TransformerEncoder, TransformerDecoderBlock,
+)
+from transformerx.txplot import Plot
 
 
 @pytest.fixture()
9 changes: 9 additions & 0 deletions transformerx/layers/__init__.py
@@ -0,0 +1,9 @@
+from .addnorm import AddNorm
+from .dot_product_attention import DotProductAttention
+from .multihead_attention import MultiHeadAttention
+from .positional_encoding import PositionalEncoding
+from .positionwise_ffn import PositionWiseFFN
+from .transformer_decoder import TransformerDecoder
+from .transformer_decoder_block import TransformerDecoderBlock
+from .transformer_encoder import TransformerEncoder
+from .transformer_encoder_block import TransformerEncoderBlock
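Together these nine re-exports flatten the public API: after this commit, every layer class can be imported directly from transformerx.layers instead of from its individual module. A minimal usage sketch of the new import surface (only the imports below are confirmed by this commit; no constructor signatures are assumed):

# All nine names re-exported by the new transformerx/layers/__init__.py,
# importable from the package root in a single statement.
from transformerx.layers import (
    AddNorm,
    DotProductAttention,
    MultiHeadAttention,
    PositionalEncoding,
    PositionWiseFFN,
    TransformerDecoder,
    TransformerDecoderBlock,
    TransformerEncoder,
    TransformerEncoderBlock,
)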
File renamed without changes: layers/addnorm.py → transformerx/layers/addnorm.py
File renamed without changes: layers/dot_product_attention.py → transformerx/layers/dot_product_attention.py
2 changes: 1 addition & 1 deletion layers/multihead_attention.py → transformerx/layers/multihead_attention.py
@@ -1,7 +1,7 @@
 import tensorflow as tf
 from einops import rearrange
 
-from layers.dot_product_attention import DotProductAttention
+from transformerx.layers.dot_product_attention import DotProductAttention
 
 
 class MultiHeadAttention(tf.keras.layers.Layer):
File renamed without changes: layers/positional_encoding.py → transformerx/layers/positional_encoding.py
File renamed without changes: layers/positionwise_ffn.py → transformerx/layers/positionwise_ffn.py
4 changes: 2 additions & 2 deletions layers/transformer_decoder.py → transformerx/layers/transformer_decoder.py
@@ -1,7 +1,7 @@
 import tensorflow as tf
 
-from layers.positional_encoding import PositionalEncoding
-from layers.transformer_decoder_block import TransformerDecoderBlock
+from transformerx.layers.positional_encoding import PositionalEncoding
+from transformerx.layers.transformer_decoder_block import TransformerDecoderBlock
 
 
 class TransformerDecoder(tf.keras.layers.Layer):
6 changes: 3 additions & 3 deletions layers/transformer_decoder_block.py → transformerx/layers/transformer_decoder_block.py
@@ -1,8 +1,8 @@
 import tensorflow as tf
 
-from layers.addnorm import AddNorm
-from layers.multihead_attention import MultiHeadAttention
-from layers.positionwise_ffn import PositionWiseFFN
+from transformerx.layers.addnorm import AddNorm
+from transformerx.layers.multihead_attention import MultiHeadAttention
+from transformerx.layers.positionwise_ffn import PositionWiseFFN
 
 
 class TransformerDecoderBlock(tf.keras.layers.Layer):
4 changes: 2 additions & 2 deletions layers/transformer_encoder.py → transformerx/layers/transformer_encoder.py
@@ -1,7 +1,7 @@
 import tensorflow as tf
 
-from layers.positional_encoding import PositionalEncoding
-from layers.transformer_encoder_block import TransformerEncoderBlock
+from transformerx.layers.positional_encoding import PositionalEncoding
+from transformerx.layers.transformer_encoder_block import TransformerEncoderBlock
 
 
 class TransformerEncoder(tf.keras.layers.Layer):
6 changes: 3 additions & 3 deletions layers/transformer_encoder_block.py → transformerx/layers/transformer_encoder_block.py
@@ -1,8 +1,8 @@
 import tensorflow as tf
 
-from layers.addnorm import AddNorm
-from layers.multihead_attention import MultiHeadAttention
-from layers.positionwise_ffn import PositionWiseFFN
+from transformerx.layers.addnorm import AddNorm
+from transformerx.layers.multihead_attention import MultiHeadAttention
+from transformerx.layers.positionwise_ffn import PositionWiseFFN
 
 
 class TransformerEncoderBlock(tf.keras.layers.Layer):
1 change: 1 addition & 0 deletions transformerx/txplot/__init__.py
@@ -0,0 +1 @@
+from .plot_pe import Plot
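Likewise, this one-line re-export makes the plotting helper available at the package level; the updated test_main.py exercises exactly this path:

# New package-level import path for the plotting helper (as used in test_main.py).
from transformerx.txplot import Plot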
File renamed without changes: txplot/plot_pe.py → transformerx/txplot/plot_pe.py
Empty file removed txplot/__init__.py
