Add code to control finetune layers (babysor#154)
babysor authored Oct 23, 2021
1 parent 31bc665 commit 724194a
Showing 3 changed files with 15 additions and 3 deletions.
4 changes: 3 additions & 1 deletion synthesizer/hparams.py
@@ -62,9 +62,11 @@ def parse(self, string):
tts_clip_grad_norm = 1.0, # clips the gradient norm to prevent explosion - set to None if not needed
tts_eval_interval = 500, # Number of steps between model evaluation (sample generation)
# Set to -1 to generate after completing epoch, or 0 to disable

tts_eval_num_samples = 1, # Makes this number of samples

## For finetune usage, if set, only selected layers will be trained, available: encoder,encoder_proj,gst,decoder,postnet,post_proj
tts_finetune_layers = [],

### Data Preprocessing
max_mel_frames = 900,
rescale = True,
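For context, the new tts_finetune_layers option would be exercised roughly as in the sketch below. This is an illustrative example, not part of the diff; it assumes the module-level hparams object that synthesizer/hparams.py exposes, and the layer names come from the comment above.

# Illustrative sketch only: restrict fine-tuning to two Tacotron children.
from synthesizer.hparams import hparams

# Train only the decoder and postnet; every other child module gets frozen.
hparams.tts_finetune_layers = ["decoder", "postnet"]

# The default empty list keeps the previous behaviour of training all layers:
# hparams.tts_finetune_layers = []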
9 changes: 9 additions & 0 deletions synthesizer/models/tacotron.py
@@ -496,6 +496,15 @@ def init_model(self):
for p in self.parameters():
    if p.dim() > 1: nn.init.xavier_uniform_(p)

def finetune_partial(self, whitelist_layers):
    self.zero_grad()
    for name, child in self.named_children():
        if name in whitelist_layers:
            print("Trainable Layer: %s" % name)
            print("Trainable Parameters: %.3f" % sum([np.prod(p.size()) for p in child.parameters()]))
        else:
            for param in child.parameters():
                param.requires_grad = False

def get_step(self):
    return self.step.data.item()

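The freeze-by-name pattern used by finetune_partial is easier to see on a small stand-in module. The sketch below is illustrative only (ToyModel and its layer names are invented); it mirrors the logic of the method above: children named in the whitelist stay trainable, every other child has its parameters frozen.

# Illustrative sketch, not part of the diff.
import torch.nn as nn

class ToyModel(nn.Module):
    def __init__(self):
        super().__init__()
        self.encoder = nn.Linear(8, 8)
        self.decoder = nn.Linear(8, 8)
        self.postnet = nn.Linear(8, 8)

    def finetune_partial(self, whitelist_layers):
        # Clear any stale gradients, then freeze everything outside the whitelist.
        self.zero_grad()
        for name, child in self.named_children():
            if name in whitelist_layers:
                n_params = sum(p.numel() for p in child.parameters())
                print("Trainable Layer: %s (%d parameters)" % (name, n_params))
            else:
                for param in child.parameters():
                    param.requires_grad = False

model = ToyModel()
model.finetune_partial(["decoder"])
# Only decoder.weight and decoder.bias still require gradients.
print([n for n, p in model.named_parameters() if p.requires_grad])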
5 changes: 3 additions & 2 deletions synthesizer/train.py
@@ -93,7 +93,7 @@ def train(run_id: str, syn_dir: str, models_dir: str, save_every: int,
speaker_embedding_size=hparams.speaker_embedding_size).to(device)

# Initialize the optimizer
optimizer = optim.Adam(model.parameters())
optimizer = optim.Adam(model.parameters(), amsgrad=True)

# Load the weights
if force_restart or not weights_fpath.exists():
@@ -146,7 +146,6 @@ def train(run_id: str, syn_dir: str, models_dir: str, save_every: int,
continue

model.r = r

# Begin the training
simple_table([(f"Steps with r={r}", str(training_steps // 1000) + "k Steps"),
("Batch Size", batch_size),
@@ -155,6 +154,8 @@ def train(run_id: str, syn_dir: str, models_dir: str, save_every: int,

for p in optimizer.param_groups:
    p["lr"] = lr
if hparams.tts_finetune_layers is not None and len(hparams.tts_finetune_layers) > 0:
    model.finetune_partial(hparams.tts_finetune_layers)

data_loader = DataLoader(dataset,
collate_fn=collate_synthesizer,
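Note that the optimizer is still built over all model parameters before finetune_partial runs; that ordering is safe because parameters frozen with requires_grad = False never receive a gradient, so optimizer.step() leaves them unchanged. The runnable sketch below (illustrative only, using a toy ModuleDict rather than Tacotron) demonstrates this.

# Illustrative sketch, not part of the diff.
import torch
import torch.nn as nn
import torch.optim as optim

# Toy stand-in: "encoder" is frozen, "decoder" stays trainable.
model = nn.ModuleDict({"encoder": nn.Linear(4, 4), "decoder": nn.Linear(4, 4)})
for p in model["encoder"].parameters():
    p.requires_grad = False

# As in the change above, the optimizer is created over *all* parameters.
optimizer = optim.Adam(model.parameters(), amsgrad=True)

before = model["encoder"].weight.clone()
loss = model["decoder"](model["encoder"](torch.randn(2, 4))).sum()
loss.backward()
optimizer.step()

# Frozen weights never get a .grad, so Adam leaves them untouched.
assert torch.equal(before, model["encoder"].weight)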
