Commit

feat: add linear layer to lstm network

glencoe committed Mar 14, 2023
1 parent a2e08ec commit 48982f0
Showing 14 changed files with 258 additions and 103 deletions.
@@ -1,5 +1,5 @@
from abc import abstractmethod
from typing import Protocol, Sequence
from typing import Protocol, Sequence, overload

from elasticai.creator.hdl.code_generation.code_generation import to_hex

3 changes: 1 addition & 2 deletions elasticai/creator/hdl/vhdl/designs/fp_linear_1d.py
@@ -4,7 +4,6 @@
module_to_package,
)
from elasticai.creator.hdl.design_base.design import Design, Port
from elasticai.creator.hdl.design_base.ports import create_port_for_buffered_design
from elasticai.creator.hdl.design_base.ports import (
create_port_for_buffered_design as create_port,
)
@@ -69,4 +68,4 @@ def save_to(self, destination: Path):
resource_option=f'"{self.resource_option}"',
**self._template_parameters(),
)
destination.as_file(f"{self.name}").write_text(template.lines())
destination.as_file(f".vhd").write_text(template.lines())
2 changes: 1 addition & 1 deletion elasticai/creator/hdl/vhdl/designs/sequential.py
@@ -155,7 +155,7 @@ def save_to(self, destination: Path):
x_width=str(self._x_width),
y_width=str(self._y_width),
)
target_file = destination.as_file(".vhd")
target_file = destination.create_subpath("sequential").as_file(".vhd")
target_file.write_text(network_implementation.lines())


32 changes: 10 additions & 22 deletions elasticai/creator/in_memory_path.py
@@ -21,32 +21,20 @@ def __init__(self, name: str, parent: Optional["InMemoryPath"]):

def as_file(self, suffix: str) -> InMemoryFile:
file = InMemoryFile(f"{self.name}{suffix}")
if len(self.children) > 0:
raise ValueError(
"non empty path {}, present children: {}".format(
self.name, ", ".join(self.children)
)
)
if self.parent is not None:
self.parent.children[self.name] = file
return file

def __getitem__(self, item):
return self.children[item]

def create_subpath(self, subpath_name: str) -> "InMemoryPath":
subpath = InMemoryPath(subpath_name, self)
subpath = InMemoryPath(name=subpath_name, parent=self)
self.children[subpath_name] = subpath
return subpath


class InMemoryPathForTesting(Path):
def __init__(self, subpath_name: str):
self.root = InMemoryPath("root", parent=None)
self._subpath_name = subpath_name
self._suffix = ""
self._subpath = self.root.create_subpath(self._subpath_name)

def create_subpath(self, subpath_name: str) -> "Path":
return self._subpath.create_subpath(subpath_name)

def as_file(self, suffix: str) -> File:
self._suffix = suffix
return self._subpath.as_file(suffix)

@property
def text(self) -> list[str]:
child = self.root.children[f"{self._subpath_name}"]
assert isinstance(child, InMemoryFile)
return child.text
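
A minimal sketch of the in-memory path behavior after this change, assuming only what the diff above shows: as_file now refuses a path that already has children, which is why the designs in this commit write their VHDL through a named subpath instead of directly into their destination.

from elasticai.creator.in_memory_path import InMemoryPath

root = InMemoryPath("build", parent=None)
network = root.create_subpath("lstm_network")

# a subpath becomes a file named after itself plus the suffix
network.create_subpath("lstm_cell").as_file(".vhd").write_text(["-- cell"])

# turning a non-empty path into a file is now rejected
network.as_file(".vhd")  # raises ValueError: non empty path lstm_network, ...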
5 changes: 3 additions & 2 deletions elasticai/creator/nn/linear.py
@@ -47,7 +47,8 @@ def __init__(
self,
in_features: int,
out_features: int,
config: FixedPointConfig,
total_bits: int,
frac_bits: int,
bias: bool,
device: Any = None,
) -> None:
@@ -56,7 +57,7 @@ def __init__(
out_features=out_features,
arithmetics=FixedPointArithmetics(
config=TwoComplementFixedPointConfig(
total_bits=config.total_bits, frac_bits=config.total_bits
total_bits=total_bits, frac_bits=frac_bits
)
),
bias=bias,
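With the signature above, callers now pass the bit widths directly instead of a FixedPointConfig object (the removed line also shows the old frac_bits=config.total_bits mix-up that this replaces). A short construction sketch with illustrative values:

from elasticai.creator.nn.linear import FixedPointLinear

layer = FixedPointLinear(
    in_features=20,
    out_features=1,
    total_bits=16,  # illustrative total width
    frac_bits=8,    # illustrative fractional bits
    bias=True,
)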
4 changes: 2 additions & 2 deletions elasticai/creator/translatable_modules/vhdl/fp_linear_1d.py
@@ -1,10 +1,10 @@
from elasticai.creator.hdl.translatable import Saveable
from elasticai.creator.hdl.design_base.design import Design
from elasticai.creator.hdl.vhdl.designs.fp_linear_1d import FPLinear1d as FPLinearDesign
from elasticai.creator.nn.linear import FixedPointLinear


class FPLinear1d(FixedPointLinear):
def translate(self) -> Saveable:
def translate(self) -> Design:
return FPLinearDesign(
frac_bits=self.frac_bits,
total_bits=self.total_bits,
20 changes: 14 additions & 6 deletions elasticai/creator/translatable_modules/vhdl/lstm/design.py
@@ -12,23 +12,30 @@
from elasticai.creator.hdl.design_base.design import Design, Port
from elasticai.creator.hdl.design_base.signal import Signal
from elasticai.creator.hdl.translatable import Path
from elasticai.creator.hdl.vhdl.designs.fp_linear_1d import FPLinear1d


class LSTMNetworkDesign(Design):
def save_to(self, destination: "Path"):
self._lstm.save_to(destination.create_subpath("lstm_cell"))
for index, layer in enumerate(self._linear_layers):
layer.save_to(destination.create_subpath(f"fp_linear_1d_{index}"))
expander = TemplateExpander(self.config)
destination.as_file(".vhd").write_text(expander.lines())
destination.create_subpath("lstm_network").as_file(".vhd").write_text(
expander.lines()
)

def __init__(
self,
lstm: Design,
linear_layers: list[FPLinear1d],
total_bits: int,
frac_bits: int,
hidden_size: int,
input_size: int,
):
super().__init__(name="lstm_network")
self._linear_layers = linear_layers
signal = partial(Signal)
self._lstm = lstm
ctrl_signal = partial(Signal, width=0)
@@ -40,8 +47,8 @@ def __init__(
frac_width=str(frac_bits),
hidden_size=str(hidden_size),
input_size=str(input_size),
linear_in_features="20",
linear_out_features="1",
linear_in_features=str(self._linear_layers[0].in_feature_num),
linear_out_features=str(self._linear_layers[0].out_feature_num),
hidden_addr_width=(
f"{calculate_address_width(hidden_size + input_size)}"
),
@@ -78,8 +85,9 @@ def __init__(self, data_width, data_address_width):

def _create_port(self, data_width: int, data_address_width: int) -> Port:
_signal = Signal
ctr_signal = partial(Signal, width=0)
incoming_control_signals = [
_signal(name=name, width=0)
ctr_signal(name=name)
for name in ("clock", "clk_hadamard", "reset", "rd", "wr")
]
incoming_data_signals = [
@@ -92,8 +100,8 @@ def _create_port(self, data_width: int, data_address_width: int) -> Port:
_signal(name="led_ctrl", width=4),
]
outgoing_control_signals = [
_signal(name="busy", width=0),
_signal(name="wake_up", width=0),
ctr_signal(name="busy"),
ctr_signal(name="wake_up"),
]
return Port(
incoming=incoming_data_signals + incoming_control_signals,
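The port construction above replaces repeated Signal(name=..., width=0) calls with a functools.partial; a small sketch of the pattern:

from functools import partial

from elasticai.creator.hdl.design_base.signal import Signal

ctr_signal = partial(Signal, width=0)  # zero-width control signals
busy = ctr_signal(name="busy")         # same as Signal(name="busy", width=0)
wake_up = ctr_signal(name="wake_up")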
39 changes: 39 additions & 0 deletions elasticai/creator/translatable_modules/vhdl/lstm/fp_hard_tanh.py
@@ -0,0 +1,39 @@
from elasticai.creator.hdl.code_generation.abstract_base_template import (
TemplateConfig,
TemplateExpander,
module_to_package,
)
from elasticai.creator.hdl.design_base.design import Design, Port
from elasticai.creator.hdl.design_base.signal import Signal
from elasticai.creator.hdl.translatable import Path
from elasticai.creator.nn._two_complement_fixed_point_config import (
TwoComplementFixedPointConfig,
)


class FPHardTanh(Design):
def __init__(self, total_bits: int, frac_bits: int):
super().__init__(name="hardtanh")
self._data_width = total_bits
fp_config = TwoComplementFixedPointConfig(
frac_bits=frac_bits, total_bits=total_bits
)
self._template = TemplateConfig(
package=module_to_package(self.__module__),
file_name="fp_hard_tanh.tpl.vhd",
parameters=dict(
data_width=str(self._data_width),
one=str(fp_config.as_integer(1)),
minus_one=str(fp_config.as_integer(-1)),
),
)

def save_to(self, destination: "Path"):
destination.as_file(".vhd").write_text(TemplateExpander(self._template).lines())

@property
def port(self) -> Port:
return Port(
incoming=[Signal("x", self._data_width)],
outgoing=[Signal("y", self._data_width)],
)
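
A minimal usage sketch for the new design, assuming the in-memory path from this commit and illustrative bit widths:

from elasticai.creator.in_memory_path import InMemoryPath
from elasticai.creator.translatable_modules.vhdl.lstm.fp_hard_tanh import FPHardTanh

build_root = InMemoryPath("build", parent=None)
hardtanh = FPHardTanh(total_bits=16, frac_bits=8)
hardtanh.save_to(build_root.create_subpath("hard_tanh"))
# build_root now holds the rendered file hard_tanh.vhd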
@@ -0,0 +1,27 @@
-- A saturating (hard) version of tanh
library ieee;
use ieee.std_logic_1164.all;
use ieee.numeric_std.all; -- for type conversions

entity tanh is
port (
x : in signed({data_width}-1 downto 0);
y : out signed({data_width}-1 downto 0)
);

end tanh;

architecture rtl of tanh is
begin

tanh_process:process(x)
begin
if x<=-16 then
y <= to_signed({minus_one}, y'length);
elsif x<16 then
y <= x;
else
y <= to_signed({one}, y'length);
end if;
end process;
end rtl;
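
For sanity-checking expanded templates, a Python model of the same piecewise function (hard_tanh_reference is a hypothetical helper; one and minus_one stand in for the {one}/{minus_one} template parameters, and the +/-16 thresholds are the hard-coded bounds from the VHDL):

def hard_tanh_reference(x: int, one: int, minus_one: int) -> int:
    # saturate below -16, pass the input through up to +16, saturate above
    if x <= -16:
        return minus_one
    if x < 16:
        return x
    return one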
69 changes: 36 additions & 33 deletions elasticai/creator/translatable_modules/vhdl/lstm/fp_lstm_cell.py
@@ -1,6 +1,6 @@
from copy import copy
from functools import partial
from itertools import chain, repeat
from itertools import repeat
from typing import Any, cast

import numpy as np
@@ -17,14 +17,14 @@
from elasticai.creator.hdl.design_base.design import Design, Port
from elasticai.creator.hdl.design_base.signal import Signal
from elasticai.creator.hdl.translatable import Path
from elasticai.creator.hdl.vhdl.code_generation import to_vhdl_hex_string
from elasticai.creator.hdl.vhdl.code_generation.code_generation import (
generate_hex_for_rom,
)
from elasticai.creator.hdl.vhdl.code_generation.template import Template
from elasticai.creator.hdl.vhdl.designs import HardSigmoid
from elasticai.creator.nn._two_complement_fixed_point_config import (
TwoComplementFixedPointConfig,
)
from elasticai.creator.translatable_modules.vhdl.lstm.fp_hard_tanh import FPHardTanh


class FPLSTMCell(Design):
@@ -143,8 +143,29 @@ def save_to(self, destination: "Path"):
parameters=biases,
address_width=self._hidden_addr_width,
)
self._save_dual_port_double_clock_ram(destination)
self._save_hardtanh(destination)
self._save_sigmoid(destination)
expander = TemplateExpander(self._config)
destination.as_file(".vhd").write_text(expander.lines())
destination.create_subpath("lstm_cell").as_file(".vhd").write_text(
expander.lines()
)

def _save_sigmoid(self, destination: Path):
sigmoid_destination = destination.create_subpath("hard_sigmoid")
sigmoid = HardSigmoid(
width=self.total_bits,
lower_bound_for_zero=self._fp_config.as_integer(-3),
upper_bound_for_one=self._fp_config.as_integer(3),
)
sigmoid.save_to(sigmoid_destination)

def _save_hardtanh(self, destination: Path):
hardtanh_destination = destination.create_subpath("hard_tanh")
hardtanh = FPHardTanh(
total_bits=self.total_bits, frac_bits=self._fp_config.frac_bits
)
hardtanh.save_to(hardtanh_destination)

def _write_files(
self,
@@ -182,34 +203,16 @@ def _pad_with_zeros(values, address_width):
suffix = list(repeat(0, 2**address_width - len(values)))
return values + suffix


class _DualPortDoubleClockRom:
def __init__(
self,
data_width: int,
values: list[int],
name: str,
resource_option: str,
) -> None:
self.name = name
self.resource_option = resource_option
self.data_width = data_width
self.addr_width = calculate_address_width(len(values))
padded_values = chain(values, repeat(0, 2**self.addr_width))

def to_hex(number: int) -> str:
return to_vhdl_hex_string(number=number, bit_width=self.data_width)

self.hex_values = list(map(to_hex, padded_values))

def lines(self) -> list[str]:
template = Template(base_name="rom")
template.update_parameters(
name=self.name,
rom_addr_bitwidth=str(self.addr_width),
rom_data_bitwidth=str(self.data_width),
rom_value=",".join(self.hex_values),
rom_resource_option=f'"{self.resource_option}"',
def _save_dual_port_double_clock_ram(self, destination: Path):
template_configuration = TemplateConfig(
file_name="dual_port_2_clock_ram.tpl.vhd",
package=module_to_package(self.__module__),
parameters=dict(
name=f"{self.name}_dual_port_2_clock_ram",
),
)
template_expansion = TemplateExpander(template_configuration)

return template.lines()
destination.create_subpath(f"{self.name}_dual_port_2_clock_ram").as_file(
".vhd"
).write_text(template_expansion.lines())
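
The rom contents written here are padded out to the full address space; a standalone copy of the padding helper shown above, with a small example:

from itertools import repeat

def pad_with_zeros(values: list[int], address_width: int) -> list[int]:
    # fill the rom up to 2**address_width entries
    suffix = list(repeat(0, 2**address_width - len(values)))
    return values + suffix

pad_with_zeros([3, 1, 2], address_width=2)  # -> [3, 1, 2, 0]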
11 changes: 10 additions & 1 deletion elasticai/creator/translatable_modules/vhdl/lstm/lstm.py
@@ -4,7 +4,11 @@
import torch.nn

from elasticai.creator.hdl.design_base.design import Design
from elasticai.creator.hdl.vhdl.designs.fp_linear_1d import (
FPLinear1d as _FPLinear1dDesign,
)
from elasticai.creator.nn.lstm import FixedPointLSTMWithHardActivations as _nnLSTM
from elasticai.creator.translatable_modules.vhdl.fp_linear_1d import FPLinear1d
from elasticai.creator.translatable_modules.vhdl.lstm.design import LSTMNetworkDesign
from elasticai.creator.translatable_modules.vhdl.lstm.fp_lstm_cell import FPLSTMCell
from elasticai.creator.translatable_modules.vhdl.module import Module
@@ -24,8 +28,13 @@ def translate(self) -> LSTMNetworkDesign:
frac_bits = first_lstm.fixed_point_config.frac_bits
hidden_size = first_lstm.hidden_size
input_size = first_lstm.input_size
follow_up_linear_layers = cast(
list[_FPLinear1dDesign],
[cast(FPLinear1d, layer).translate() for layer in children[1:]],
)
return LSTMNetworkDesign(
first_lstm.translate(),
lstm=first_lstm.translate(),
linear_layers=follow_up_linear_layers,
total_bits=total_bits,
frac_bits=frac_bits,
hidden_size=hidden_size,
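The translation above treats the first child as the LSTM and every following child as an FPLinear1d whose translate() yields the matching hardware design; the network design then reads linear_in_features and linear_out_features from the first of these layers instead of the previous hard-coded "20"/"1". A self-contained sketch of that step (translate_linear_layers is a hypothetical helper):

from typing import cast

from elasticai.creator.translatable_modules.vhdl.fp_linear_1d import FPLinear1d

def translate_linear_layers(children: list) -> list:
    # children[0] is the LSTM; the rest are expected to be linear layers
    return [cast(FPLinear1d, layer).translate() for layer in children[1:]]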
@@ -153,7 +153,7 @@ begin

-- linear_x_data <= std_logic_vector(to_signed(test_x_ram(to_integer(unsigned(linear_read_addr))),d_out'length));

i_linear_layer : entity xil_defaultlib.fp_linear_1d_1(rtl)
i_linear_layer : entity xil_defaultlib.fp_linear_1d_0(rtl)
port map (
clock => clock,
enable => linear_enable,
(Diffs for the remaining changed files did not load.)
