Skip to content

Commit

Permalink
Add config for pruning and sampler
Browse files Browse the repository at this point in the history
  • Loading branch information
himkt committed Nov 14, 2020
1 parent 7c98ced commit ff60176
Showing 1 changed file with 70 additions and 0 deletions.
70 changes: 70 additions & 0 deletions config/imdb_optuna_with_pruning.jsonnet
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@

// AllenNLP configuration for IMDB sentiment classification with a CNN
// encoder, driven by an Optuna hyperparameter search with pruning.
//
// Hyperparameters below are supplied by Optuna through jsonnet external
// variables (--ext-var / std.extVar); everything in `fixed` is constant
// across trials.

// Tunable values injected per trial by Optuna.
local tuned = {
  embedding_dim: std.parseInt(std.extVar('embedding_dim')),
  dropout: std.parseJson(std.extVar('dropout')),
  lr: std.parseJson(std.extVar('lr')),
  max_filter_size: std.parseInt(std.extVar('max_filter_size')),
  num_filters: std.parseInt(std.extVar('num_filters')),
  output_dim: std.parseInt(std.extVar('output_dim')),
};

// Constants shared by every trial.
local fixed = {
  batch_size: 64,
  cuda_device: 0,
  num_epochs: 15,
  seed: 42,
};

// CNN n-gram filter widths: 2 up to the tuned maximum (inclusive).
local filter_sizes = std.range(2, tuned.max_filter_size);

{
  // Seed all three RNG sources identically for reproducibility.
  numpy_seed: fixed.seed,
  pytorch_seed: fixed.seed,
  random_seed: fixed.seed,
  dataset_reader: {
    lazy: false,
    type: 'text_classification_json',
    tokenizer: {
      type: 'spacy',
    },
    token_indexers: {
      tokens: {
        type: 'single_id',
        lowercase_tokens: true,
      },
    },
  },
  train_data_path: 'https://s3-us-west-2.amazonaws.com/allennlp/datasets/imdb/train.jsonl',
  validation_data_path: 'https://s3-us-west-2.amazonaws.com/allennlp/datasets/imdb/dev.jsonl',
  model: {
    type: 'basic_classifier',
    text_field_embedder: {
      token_embedders: {
        tokens: {
          embedding_dim: tuned.embedding_dim,
        },
      },
    },
    seq2vec_encoder: {
      type: 'cnn',
      embedding_dim: tuned.embedding_dim,
      ngram_filter_sizes: filter_sizes,
      num_filters: tuned.num_filters,
      output_dim: tuned.output_dim,
    },
    dropout: tuned.dropout,
  },
  data_loader: {
    shuffle: true,
    batch_size: fixed.batch_size,
  },
  trainer: {
    cuda_device: fixed.cuda_device,
    // Reports validation accuracy to Optuna each epoch so unpromising
    // trials can be pruned early.
    epoch_callbacks: [
      {
        type: 'optuna_pruner',
      }
    ],
    num_epochs: fixed.num_epochs,
    optimizer: {
      lr: tuned.lr,
      type: 'sgd',
    },
    // '+' means higher is better; used for early stopping / model selection.
    validation_metric: '+accuracy',
  },
}

0 comments on commit ff60176

Please sign in to comment.