Commit 1ddedccd authored by Marius Kriegerowski's avatar Marius Kriegerowski

minimize test

parent 8d5f44a0
......@@ -79,7 +79,7 @@ class Model(Object):
inputs=input,
filters=n_filters,
kernel_size=(cross_channel_kernel, kernel_width), # use identity (1) along channels
activation=tf.nn.relu,
activation=self.activation,
bias_initializer=self.initializer,
name=name+'conv2d')
......@@ -104,25 +104,33 @@ class Model(Object):
training = bool(mode == tf.estimator.ModeKeys.TRAIN)
n_filters = 32
n_filters = params.get('base_capacity', 32)
kernel_width = params.get('kernel_width', 2)
kernel_width_factor = params.get('kernel_width_factor', 1)
self.activation = params.get('activation', tf.nn.relu)
n_channels, n_samples = self.data_generator.tensor_shape
features = tf.reshape(features, [-1, n_channels, n_samples, 1])
input = tf.reshape(features, [-1, n_channels, n_samples, 1])
# tf.summary.image('input', features)
for ilayer in range(params.get('n_layers', 3)):
input = self.time_axis_cnn(input, n_filters, 1,
kernel_width=int(kernel_width + ilayer*kernel_width_factor),
name='conv_%s' % ilayer, training=training)
# conv = self.time_axis_cnn(conv, n_filters*2, 1, kernel_width=2, name='conv2',
# training=training)
# conv = self.time_axis_cnn(conv, n_filters*4, 2, kernel_width=3, name='conv3',
# training=training)
# conv = self.time_axis_cnn(features, n_filters, None, kernel_width=3, name='conv1',
# training=training)
conv = self.time_axis_cnn(features, n_filters, 1, kernel_width=1, name='conv1',
training=training)
conv = self.time_axis_cnn(conv, n_filters*2, 1, kernel_width=2, name='conv2',
training=training)
conv = self.time_axis_cnn(conv, n_filters*4, 2, kernel_width=1, name='conv3',
training=training)
fc = tf.contrib.layers.flatten(conv)
fc = tf.layers.dense(fc, 512, activation=tf.nn.relu)
# fc = tf.layers.dense(fc, 1024, activation=tf.nn.relu)
if self.dropout_rate:
fc = tf.contrib.layers.flatten(input)
fc = tf.layers.dense(fc, params.get('n_filters_dense', 512),
activation=self.activation)
if params.get('dropout_rate', False):
fc = tf.layers.dropout(
fc, rate=params['dropout_rate'], training=training)
elif self.dropout_rate:
fc = tf.layers.dropout(
fc, rate=self.dropout_rate, training=training)
......
......@@ -2,8 +2,11 @@ from .util import delete_if_exists
from skopt import gp_minimize
from skopt.space import Real, Categorical, Integer
from pyrocko.guts import Object, Int, Float, List, Tuple, String
import logging
logger = logging.getLogger()
def to_skopt_real(x, name, prior):
    '''Convert a ``(low, high)`` pair into a named skopt ``Real`` dimension.

    :param x: two-element sequence giving the lower and upper bound.
    :param name: dimension name passed through to skopt.
    :param prior: prior distribution identifier, e.g. ``'log-uniform'``.
    :returns: a :py:class:`skopt.space.Real` describing the search range.
    '''
    low, high = x[0], x[1]
    return Real(name=name, prior=prior, low=low, high=high)
......@@ -20,39 +23,69 @@ class Optimizer(Object):
self.model = None
# self.dimensions = [
# to_skopt_real(self.learning_rate, 'learning_rate', 'log-uniform')]
self.optimizer_defaults = [
('learning_rate', 1e-4),
('base_capacity', 32),
('kernel_width', 2),
('kernel_width_factor', 2),
('n_filters_dense', 512),
('dropout_rate', 0.1),
]
self.dimensions = [
Real(low=1e-6, high=1e-2, prior='log-uniform',
name='learning_rate')
name='learning_rate'),
Integer(low=12, high=64, name='base_capacity'),
Integer(low=1, high=5, name='kernel_width'),
Real(low=1, high=3, prior='uniform', name='kernel_width_factor'),
Integer(low=64, high=1024, name='n_filters_dense'),
Real(low=0., high=0.4, prior='uniform', name='dropout_rate'),
]
print(self.dimensions)
@property
def optimizer_keys(self):
    '''Names of the tunable hyper-parameters, in the order they are
    declared in ``optimizer_defaults``.'''
    return list(map(lambda pair: pair[0], self.optimizer_defaults))
@property
def optimizer_values(self):
    '''Default values of the tunable hyper-parameters, ordered to match
    :py:attr:`optimizer_keys`.'''
    defaults = []
    for _name, default in self.optimizer_defaults:
        defaults.append(default)
    return defaults
def announce_test(self, params):
    '''Log the hyper-parameter set that is about to be evaluated.

    :param params: dict mapping parameter names to the values the next
        training run will use.
    '''
    logger.info('+' * 20)
    logger.info('evaluating next set of parameters:')
    for key, value in params.items():
        # Lazy %-args defer string building until the record is actually
        # emitted; the old template's trailing '\n' also produced a blank
        # line after every parameter, which is dropped here.
        logger.info('  %s: %s', key, value)
def evaluate(self, args):
    '''Wrapper adapting ``gp_minimize``'s positional parameter list to
    the keyword dict expected by ``Model.train``.

    :param args: parameter values ordered as in :py:attr:`optimizer_keys`.
    :returns: the ``'loss'`` entry of the training result, which
        ``gp_minimize`` minimizes.
    '''
    params = dict(zip(self.optimizer_keys, args))
    # Each trial logs to its own directory so runs stay separable in
    # TensorBoard.
    self.model.outdir = self.log_dir_name(params)
    self.announce_test(params)
    return self.model.train(params)['loss']
def optimize(self, model):
    '''Optimize the hyper-parameters of a :py:class:`pinky.model.Model`
    instance.

    :param model: model whose ``train`` method is called once per
        parameter set proposed by ``gp_minimize``.
    '''
    self.model = model

    if self.model.auto_clear:
        # Remove stale logs so old runs do not pollute this optimization.
        delete_if_exists(self.log_path)

    gp_minimize(
        func=self.evaluate,
        dimensions=self.dimensions,
        acq_func='EI',  # Expected Improvement
        n_calls=self.n_calls,
        # Seed the search with the declared defaults so the first trial
        # is the known-reasonable configuration.
        x0=self.optimizer_values,
    )
def log_dir_name(self, params):
'''Helper functions to transform `params` into a logging directory
name.'''
# The dir-name for the TensorBoard log-dir.
placeholders = '{}_{}_' * len(params)
identifiers = []
for k, v in params.items():
......@@ -61,7 +94,7 @@ class Optimizer(Object):
placeholders = placeholders.format(*identifiers)
log_dir = self.log_path + placeholders
logger.info('Created new logging directory: %s' % log_dir)
return log_dir
@classmethod
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment