Skip to content

Commit

Permalink
add learnable layer class
Browse files Browse the repository at this point in the history
  • Loading branch information
takerum committed Jan 18, 2016
1 parent 5e8a76a commit 8a45de3
Show file tree
Hide file tree
Showing 4 changed files with 12 additions and 6 deletions.
4 changes: 2 additions & 2 deletions source/layers/batch_normalization.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@
import theano.tensor as T
import numpy

from layer import Layer
from layer import LearnableLayer

class BatchNormalization(Layer):
class BatchNormalization(LearnableLayer):

def __init__(self,size,moving_avg_ratio=0.9,initial_gamma=None,initial_beta=None):

Expand Down
4 changes: 2 additions & 2 deletions source/layers/conv_2d.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import numpy
import collections
from theano.tensor.nnet import conv
from layer import Layer
from layer import LearnableLayer

from pylearn2.sandbox.cuda_convnet.filter_acts import FilterActs
from theano.sandbox.cuda.basic_ops import gpu_contiguous
Expand All @@ -13,7 +13,7 @@ def _pair(x):
return x
return (x, x)

class Conv_2D(Layer):
class Conv_2D(LearnableLayer):

def __init__(self,in_maps,out_maps,filter_size,border_mode='valid',stride=1,use_bias=True,initial_W=None,initial_b=None,use_pylearn=True):
"""
Expand Down
6 changes: 6 additions & 0 deletions source/layers/layer.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,9 @@ def __call__(self, input):

def forward(self,input):
raise NotImplementedError()


class LearnableLayer(Layer):
    """Base class for layers that own trainable parameters.

    Distinguishes parameterized layers (this commit switches Linear,
    Conv_2D and BatchNormalization to subclass it) from stateless
    ``Layer`` subclasses. ``params`` starts out as ``None``; subclasses
    are expected to assign their parameter collection to it.
    """

    def __init__(self):
        # Sentinel until a subclass installs its parameters.
        self.params = None
4 changes: 2 additions & 2 deletions source/layers/linear.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@
import theano.tensor as T
import numpy

from layer import Layer
from layer import LearnableLayer

class Linear(Layer):
class Linear(LearnableLayer):

def __init__(self,size,use_bias=True,initial_W=None,initial_b=None):
self.use_bias = use_bias
Expand Down

0 comments on commit 8a45de3

Please sign in to comment.