
Commit

Merge pull request #69 from n1analytics/learning_rate_fix_for_federated_learning

make learning rate independent of dataset size
wilko77 authored Sep 11, 2018
2 parents 52fe9b0 + 9502c41 commit fe24881
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions examples/federated_learning_with_encryption.py
@@ -171,7 +171,7 @@ def compute_gradient(self):
"""Compute the gradient of the current model using the training set
"""
delta = self.predict(self.X) - self.y
return delta.dot(self.X)
return delta.dot(self.X) / len(self.X)

def predict(self, X):
"""Score test data"""
@@ -214,7 +214,7 @@ def federated_learning(X, y, X_test, y_test, config):

         # Compute gradients, encrypt and aggregate
         encrypt_aggr = clients[0].encrypted_gradient(sum_to=None)
-        for c in clients:
+        for c in clients[1:]:
             encrypt_aggr = c.encrypted_gradient(sum_to=encrypt_aggr)
 
         # Send aggregate to server and decrypt it
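
The loop previously started at clients[0], whose gradient had already seeded the aggregate via sum_to=None, so the first client's contribution was counted twice. Iterating over clients[1:] folds in each remaining client exactly once. A minimal sketch of the same seed-then-fold pattern using the phe (python-paillier) library directly, with made-up scalar gradients in place of the example's arrays:

    from phe import paillier

    public_key, private_key = paillier.generate_paillier_keypair(n_length=1024)
    gradients = [0.5, -1.25, 2.0]  # hypothetical per-client gradient entries

    # Seed the aggregate with the first client's encrypted gradient ...
    aggregate = public_key.encrypt(gradients[0])
    # ... then add the remaining clients homomorphically; starting the loop
    # at index 1 is what avoids double counting the first client.
    for g in gradients[1:]:
        aggregate = aggregate + public_key.encrypt(g)

    assert abs(private_key.decrypt(aggregate) - sum(gradients)) < 1e-9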
@@ -253,10 +253,10 @@ def local_learning(X, y, X_test, y_test, config):

 if __name__ == '__main__':
     config = {
-        'n_clients': 3,
+        'n_clients': 5,
         'key_length': 1024,
         'n_iter': 50,
-        'eta': 0.01,
+        'eta': 1.5,
     }
     # load data, train/test split and split training data between clients
     X, y, X_test, y_test = get_data(n_clients=config['n_clients'])
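
With the gradient now averaged, eta no longer has to shrink as the dataset grows, which is presumably why it can jump from 0.01 to 1.5 even as n_clients rises from 3 to 5. A small self-contained sketch (plain NumPy least squares, not the example's full federated pipeline) showing eta = 1.5 converging on an averaged gradient:

    import numpy as np

    rng = np.random.RandomState(1)
    X = rng.randn(500, 3)
    true_w = np.array([1.0, -2.0, 0.5])
    y = X.dot(true_w)

    w, eta = np.zeros(3), 1.5                  # eta as in the updated config
    for _ in range(50):                        # 'n_iter': 50
        grad = (X.dot(w) - y).dot(X) / len(X)  # averaged, as in the new code
        w -= eta * grad

    print(np.round(w, 3))  # approaches true_w regardless of len(X)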
