Commit
chore: replace all Keras imports with tensorflow.keras imports to be compatible with Keras-2.4 and tensorflow-2.3.

This does not break existing requirements or installations, as Keras has been part of TensorFlow since tensorflow-2.0.
Fixes: #35
levaphenyl committed Dec 2, 2020
1 parent de38dab commit 161bf0d
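
For illustration, a minimal sketch of the import swap this commit applies throughout the package: the standalone keras package is traded for the tf.keras API that ships with TensorFlow >= 2.0, so existing model code runs unchanged. The tiny model below is a placeholder written for this example, not code from the repository.

# Before (standalone Keras):
#   from keras.models import Sequential
#   from keras.layers import Dense
#   import keras.backend as K
# After (Keras bundled with TensorFlow >= 2.0):
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
import tensorflow.keras.backend as K

model = Sequential([Dense(1, activation='linear', input_shape=(3,))])
model.compile(loss='mse', optimizer='adam')
print(K.floatx())  # the bundled backend behaves like standalone Keras, e.g. "float32"
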
Showing 7 changed files with 30 additions and 28 deletions.
1 change: 1 addition & 0 deletions conda/meta.yaml
@@ -16,4 +16,5 @@ requirements:
 - python>=3.6
 - nilmtk>=0.4
 - keras>=2.2.4
+- tensorflow>=2.0
 - cvxpy>=1.0.0
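
With the new pin in place, a quick way to confirm that an environment satisfies both constraints without importing the standalone keras package; this is only a hedged sketch, and tf.keras.__version__ reports the Keras version bundled with TensorFlow (e.g. 2.4.0 on tensorflow-2.3).

# Check the installed TensorFlow and bundled-Keras versions against the pins above.
import tensorflow as tf

print("tensorflow:", tf.__version__)           # should satisfy >= 2.0
print("bundled keras:", tf.keras.__version__)  # should satisfy >= 2.2.4
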
12 changes: 6 additions & 6 deletions nilmtk_contrib/disaggregate/WindowGRU.py
@@ -12,12 +12,12 @@
 import os
 import pickle

-from keras.models import Sequential
-from keras.layers import Dense, Conv1D, GRU, Bidirectional, Dropout
-from keras.utils import plot_model
+from tensorflow.keras.models import Sequential
+from tensorflow.keras.layers import Dense, Conv1D, GRU, Bidirectional, Dropout
+from tensorflow.keras.utils import plot_model
 from sklearn.model_selection import train_test_split
-from keras.callbacks import ModelCheckpoint
-import keras.backend as K
+from tensorflow.keras.callbacks import ModelCheckpoint
+import tensorflow.keras.backend as K
 from nilmtk.utils import find_nearest
 from nilmtk.feature_detectors import cluster
 from nilmtk.disaggregate import Disaggregator
@@ -188,4 +188,4 @@ def return_network(self):
 model.add(Dropout(0.5))
 model.add(Dense(1, activation='linear'))
 model.compile(loss='mse', optimizer='adam')
-return model
+return model
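
For reference, a hedged sketch that rebuilds only the tail of return_network() shown in this hunk using the new tensorflow.keras imports; the Conv1D/Bidirectional GRU stack that precedes it and the input length are placeholders, not the values WindowGRU actually uses.

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout

model = Sequential()
# Placeholder for the omitted convolutional and recurrent layers.
model.add(Dense(64, activation='relu', input_shape=(99,)))
model.add(Dropout(0.5))
model.add(Dense(1, activation='linear'))
model.compile(loss='mse', optimizer='adam')
model.summary()
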
10 changes: 5 additions & 5 deletions nilmtk_contrib/disaggregate/dae.py
@@ -1,16 +1,16 @@
 from __future__ import print_function, division
 from warnings import warn
 from nilmtk.disaggregate import Disaggregator
-from keras.layers import Conv1D, Dense, Dropout, Reshape, Flatten
+from tensorflow.keras.layers import Conv1D, Dense, Dropout, Reshape, Flatten
 import pandas as pd
 import numpy as np
 from collections import OrderedDict
-from keras.optimizers import SGD
-from keras.models import Sequential
+from tensorflow.keras.optimizers import SGD
+from tensorflow.keras.models import Sequential
 import matplotlib.pyplot as plt
 from sklearn.model_selection import train_test_split
-from keras.callbacks import ModelCheckpoint
-import keras.backend as K
+from tensorflow.keras.callbacks import ModelCheckpoint
+import tensorflow.keras.backend as K
 from statistics import mean
 import os
 import pickle
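
The callback import moves the same way; here is a minimal, hedged sketch of ModelCheckpoint from tensorflow.keras.callbacks wired into a fit() call, using toy data, a placeholder file name, and a tiny model rather than anything from dae.py.

import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.callbacks import ModelCheckpoint

X = np.random.rand(64, 10)
y = np.random.rand(64, 1)

model = Sequential([Dense(8, activation='relu', input_shape=(10,)), Dense(1)])
model.compile(loss='mse', optimizer='adam')

# Keep only the best weights seen so far, monitored on training loss.
checkpoint = ModelCheckpoint('dae-example.h5', monitor='loss', save_best_only=True)
model.fit(X, y, epochs=2, batch_size=16, callbacks=[checkpoint], verbose=0)
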
12 changes: 6 additions & 6 deletions nilmtk_contrib/disaggregate/rnn.py
@@ -1,18 +1,18 @@
 from __future__ import print_function, division
 from warnings import warn
 from nilmtk.disaggregate import Disaggregator
-from keras.layers import Conv1D, Dense, Dropout, Reshape, Flatten, Bidirectional, LSTM
+from tensorflow.keras.layers import Conv1D, Dense, Dropout, Reshape, Flatten, Bidirectional, LSTM
 import os
 import pickle
 import pandas as pd
 import numpy as np
 from collections import OrderedDict
-from keras.optimizers import SGD
-from keras.models import Sequential, load_model
+from tensorflow.keras.optimizers import SGD
+from tensorflow.keras.models import Sequential, load_model
 import matplotlib.pyplot as plt
 from sklearn.model_selection import train_test_split
-from keras.callbacks import ModelCheckpoint
-import keras.backend as K
+from tensorflow.keras.callbacks import ModelCheckpoint
+import tensorflow.keras.backend as K
 import random
 import sys
 random.seed(10)
@@ -192,4 +192,4 @@ def set_appliance_params(self,train_appliances):
 app_std = 100
 self.appliance_params.update({app_name:{'mean':app_mean,'std':app_std}})
 print (self.appliance_params)
-
+
12 changes: 6 additions & 6 deletions nilmtk_contrib/disaggregate/seq2point.py
@@ -1,18 +1,18 @@
 from __future__ import print_function, division
 from warnings import warn
 from nilmtk.disaggregate import Disaggregator
-from keras.layers import Conv1D, Dense, Dropout, Reshape, Flatten
+from tensorflow.keras.layers import Conv1D, Dense, Dropout, Reshape, Flatten
 import os
 import pickle
 import pandas as pd
 import numpy as np
 from collections import OrderedDict
-from keras.optimizers import SGD
-from keras.models import Sequential, load_model
+from tensorflow.keras.optimizers import SGD
+from tensorflow.keras.models import Sequential, load_model
 import matplotlib.pyplot as plt
 from sklearn.model_selection import train_test_split
-from keras.callbacks import ModelCheckpoint
-import keras.backend as K
+from tensorflow.keras.callbacks import ModelCheckpoint
+import tensorflow.keras.backend as K
 import random
 import sys
 random.seed(10)
@@ -188,4 +188,4 @@ def set_appliance_params(self,train_appliances):
 if app_std<1:
 app_std = 100
 self.appliance_params.update({app_name:{'mean':app_mean,'std':app_std}})
-print (self.appliance_params)
+print (self.appliance_params)
10 changes: 5 additions & 5 deletions nilmtk_contrib/disaggregate/seq2seq.py
@@ -2,20 +2,20 @@
 from warnings import warn

 from nilmtk.disaggregate import Disaggregator
-from keras.layers import Conv1D, Dense, Dropout, Reshape, Flatten
+from tensorflow.keras.layers import Conv1D, Dense, Dropout, Reshape, Flatten

 import os
 import pandas as pd
 import numpy as np
 import pickle
 from collections import OrderedDict

-from keras.optimizers import SGD
-from keras.models import Sequential, load_model
+from tensorflow.keras.optimizers import SGD
+from tensorflow.keras.models import Sequential, load_model
 import matplotlib.pyplot as plt
 from sklearn.model_selection import train_test_split
-from keras.callbacks import ModelCheckpoint
-import keras.backend as K
+from tensorflow.keras.callbacks import ModelCheckpoint
+import tensorflow.keras.backend as K
 import random
 random.seed(10)
 np.random.seed(10)
1 change: 1 addition & 0 deletions setup.py
@@ -73,6 +73,7 @@ def write_version_py(filename=None):
 install_requires=[
 'nilmtk>=0.4',
 'keras>=2.2.4',
+'tensorflow>=2.0',
 'cvxpy>=1.0.0'
 ],
 description="State-of-the-art algorithms for the task of energy disaggregation implemented using NILMTK's Rapid Experimentation API",
