# PyTorch Lightning 1.x + Neptune [Basic Example]
# Before you start
## Install dependencies
# pip install pytorch-lightning torch torchvision neptune-client  (packages implied by the imports below)
# Step 1: Import Libraries
import os
import torch
from torch.nn import functional as F
from torch.utils.data import DataLoader
from torchvision.datasets import MNIST
from torchvision import transforms
import pytorch_lightning as pl
# Step 2: Define Hyper-Parameters
PARAMS = {'max_epochs': 3,
          'learning_rate': 0.005,
          'batch_size': 32}
# Step 3: Define LightningModule and DataLoader
# pl.LightningModule
class LitModel(pl.LightningModule):
    def __init__(self):
        super().__init__()
        # Single linear layer mapping flattened 28x28 images to 10 classes
        self.l1 = torch.nn.Linear(28 * 28, 10)

    def forward(self, x):
        return torch.relu(self.l1(x.view(x.size(0), -1)))

    def training_step(self, batch, batch_idx):
        x, y = batch
        y_hat = self(x)
        loss = F.cross_entropy(y_hat, y)
        # Logged values are picked up by the attached logger (Neptune below)
        self.log('train_loss', loss)
        return loss

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=PARAMS['learning_rate'])
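
# A minimal sketch (not part of the basic example): validation is wired in the
# same way as training. The subclass and the 'val_loss' metric name are
# illustrative; a matching validation DataLoader is sketched after the
# training loader below.
class LitModelWithVal(LitModel):
    def validation_step(self, batch, batch_idx):
        x, y = batch
        # Lightning averages per-batch values logged here over the epoch
        self.log('val_loss', F.cross_entropy(self(x), y))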
# DataLoader
train_loader = DataLoader(MNIST(os.getcwd(), download=True, transform=transforms.ToTensor()),
                          batch_size=PARAMS['batch_size'])
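
# Sketch of a matching held-out loader for LitModelWithVal above
# (illustrative; train=False selects the MNIST test split):
val_loader = DataLoader(MNIST(os.getcwd(), train=False, download=True,
                              transform=transforms.ToTensor()),
                        batch_size=PARAMS['batch_size'])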
# Step 4: Create NeptuneLogger
from pytorch_lightning.loggers.neptune import NeptuneLogger
neptune_logger = NeptuneLogger(
    api_key="ANONYMOUS",
    project_name="shared/pytorch-lightning-integration",
    params=PARAMS)
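
# This NeptuneLogger version also accepts optional experiment metadata; a
# sketch, commented out so only one experiment is opened (names and tags are
# illustrative):
# neptune_logger = NeptuneLogger(
#     api_key="ANONYMOUS",
#     project_name="shared/pytorch-lightning-integration",
#     experiment_name="pytorch-lightning-basic",  # illustrative name
#     tags=["mnist", "basic"],                    # illustrative tags
#     params=PARAMS)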
# Step 5: Pass NeptuneLogger to the Trainer
trainer = pl.Trainer(max_epochs=PARAMS['max_epochs'],
                     logger=neptune_logger)
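
# Sketch of common PL 1.x Trainer flags paired with this setup, commented out
# to keep the basic run unchanged (values illustrative):
# trainer = pl.Trainer(max_epochs=PARAMS['max_epochs'],
#                      logger=neptune_logger,
#                      gpus=1,                # train on a single GPU
#                      log_every_n_steps=50)  # how often to log within an epoch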
# Step 6: Run experiment
model = LitModel()
trainer.fit(model, train_loader)
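
# Usage sketch for the validation variant defined above (illustrative; would
# replace the fit() call rather than follow it):
# trainer.fit(LitModelWithVal(), train_loader, val_loader)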
# Explore Results
# When training starts, neptune-client prints a link to the experiment in the
# console; open it to browse the logged metrics and hyper-parameters in the
# Neptune UI.