Dataset fields (one record per function):

Field              Type            Range
-----------------  --------------  --------------------
partition          stringclasses   3 values
func_name          stringlengths   1 to 134 chars
docstring          stringlengths   1 to 46.9k chars
path               stringlengths   4 to 223 chars
original_string    stringlengths   75 to 104k chars
code               stringlengths   75 to 104k chars
docstring_tokens   listlengths     1 to 1.97k items
repo               stringlengths   7 to 55 chars
language           stringclasses   1 value
url                stringlengths   87 to 315 chars
code_tokens        listlengths     19 to 28.4k items
sha                stringlengths   40 chars (fixed)
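
The records below follow this schema. As a minimal sketch of how such a split can be consumed, assuming the records are exported as JSON lines; the filename is hypothetical, and only fields named in the schema above are touched:

```python
# Hedged sketch: reading records with the schema above from a local JSONL export.
# "python_test.jsonl" is a hypothetical filename; each line is one record.
import json

with open("python_test.jsonl") as f:
    for line in f:
        record = json.loads(line)
        assert record["language"] == "python"
        assert len(record["sha"]) == 40        # sha is a fixed-width git commit hash
        if record["partition"] == "test":
            print(record["func_name"], "->", record["path"])
            source = record["code"]            # full function source, docstring included
```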

partition: test
func_name: optimize_updates
docstring: General optimization function for Theano. Parameters: params - parameters gradients - gradients config - training config Returns: Theano updates :type config: deepy.TrainerConfig or dict
path: deepy/trainers/optimize.py
code:

    def optimize_updates(params, gradients, config=None, shapes=None):
        """
        General optimization function for Theano.
        Parameters:
            params - parameters
            gradients - gradients
            config - training config
        Returns:
            Theano updates
        :type config: deepy.TrainerConfig or dict
        """
        if config and isinstance(config, dict):
            config = TrainerConfig(config)

        # Clipping
        if config:
            clip_value = config.get("gradient_clipping", None)
            if clip_value:
                clip_constant = T.constant(clip_value, dtype=FLOATX)
                if config.avoid_compute_embed_norm:
                    grad_norm = multiple_l2_norm([t[1] for t in zip(params, gradients)
                                                  if not t[0].name.startswith("W_embed")])
                else:
                    grad_norm = multiple_l2_norm(gradients)
                isnan = T.or_(T.isnan(grad_norm), T.isinf(grad_norm))
                multiplier = ifelse(grad_norm < clip_constant,
                                    T.constant(1., dtype=FLOATX),
                                    clip_constant / (grad_norm + EPSILON))

                # Clip
                clipped_gradients = []
                for param, g in zip(params, gradients):
                    g = multiplier * g
                    if config.avoid_nan:
                        g = T.switch(isnan, np.float32(0.1) * param, g)
                    if config.gradient_tolerance:
                        g = ifelse(grad_norm > config.gradient_tolerance,
                                   T.zeros_like(g) + EPSILON, g)
                    clipped_gradients.append(g)
                gradients = clipped_gradients

        # Regularization
        if config and config.weight_l2:
            regularized_gradients = []
            for param, grad in zip(params, gradients):
                grad = grad + (2 * config.weight_l2 * param)
                regularized_gradients.append(grad)
            gradients = regularized_gradients

        # Avoid nan but not computing the norm
        # This is not recommended
        if config and config.avoid_nan and not config.gradient_clipping:
            logging.info("avoid NaN gradients")
            new_gradients = []
            for grad in gradients:
                new_grad = ifelse(T.isnan(grad).any(), T.zeros_like(grad) + EPSILON, grad)
                new_gradients.append(new_grad)
            gradients = new_gradients

        # Find method
        method = "SGD"
        if config:
            method = config.get("method", method).upper()

        # Get Function
        func = None
        if method in ["SGD", "ADAGRAD", "ADADELTA", "FINETUNING_ADAGRAD"]:
            from cores.ada_family import ada_family_core
            func = ada_family_core
        elif method == "ADAM":
            from cores.adam import adam_core
            func = adam_core
        elif method == "RMSPROP":
            from cores.rmsprop import rmsprop_core
            func = rmsprop_core
        elif method == "MOMENTUM":
            from cores.momentum import momentum_core
            func = momentum_core

        if not func:
            raise NotImplementedError("method '%s' is not supported" % method)

        logging.info("optimize method=%s parameters=%s" % (method, str(params)))

        free_parameters = []
        return_vals = wrap_core(func, config, params, gradients)
        if type(return_vals) == list and type(return_vals[0]) == list:
            updates, free_parameters = return_vals
        else:
            updates = return_vals

        # No free param recording
        if config and not config.record_free_params:
            free_parameters = []

        # Weight bound
        if config.weight_bound:
            logging.info("apply weight bound of %.2f" % config.weight_bound)
            new_updates = []
            for param, update_value in updates:
                bounded_value = (update_value * (T.abs_(update_value) <= config.weight_bound) +
                                 config.weight_bound * (update_value > config.weight_bound) +
                                 -config.weight_bound * (update_value < -config.weight_bound))
                new_updates.append((param, bounded_value))
            updates = new_updates
        return updates, free_parameters
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/optimize.py#L19-L123
sha: 090fbad22a08a809b12951cd0d4984f5bd432698
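
A minimal usage sketch for optimize_updates, assuming deepy and Theano are installed and that TrainerConfig fills in defaults for any option the dict leaves out; the parameter shape and config values are illustrative:

```python
# Hedged sketch: building Theano updates with optimize_updates (deepy assumed).
import numpy as np
import theano
import theano.tensor as T
from deepy.trainers.optimize import optimize_updates

W = theano.shared(np.zeros((5, 3), dtype=theano.config.floatX), name="W")
x = T.matrix("x")
cost = T.sum(T.dot(x, W) ** 2)

# "method" and "gradient_clipping" are config keys the function reads directly.
updates, free_parameters = optimize_updates(
    [W], T.grad(cost, [W]), config={"method": "ADAM", "gradient_clipping": 3})
train_step = theano.function([x], cost, updates=updates)
train_step(np.ones((2, 5), dtype=theano.config.floatX))
```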

partition: test
func_name: optimize_function
docstring: Create a optimizing function receives gradients. Parameters: params - parameters config - training configuration Returns: updating function receives gradients
path: deepy/trainers/optimize.py
code:

    def optimize_function(params, config=None):
        """
        Create a optimizing function receives gradients.
        Parameters:
            params - parameters
            config - training configuration
        Returns:
            updating function receives gradients
        """
        gs = [dim_to_var(p.ndim) for p in params]
        updates, _ = optimize_updates(params, gs, config)
        return theano.function(gs, [], updates=updates)
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/optimize.py#L125-L136
sha: 090fbad22a08a809b12951cd0d4984f5bd432698
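
The compiled function returned above takes one gradient array per parameter (matching each parameter's ndim through dim_to_var) and applies an update step. A sketch under the same assumptions as the previous one:

```python
# Hedged sketch: feeding externally computed gradients through optimize_function.
import numpy as np
import theano
from deepy.trainers.optimize import optimize_function

W = theano.shared(np.zeros((5, 3), dtype=theano.config.floatX), name="W")
update_fn = optimize_function([W], config={"method": "SGD"})

# One gradient array per parameter; a dummy all-ones gradient here.
update_fn(np.ones((5, 3), dtype=theano.config.floatX))
```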

partition: test
func_name: GeneralNeuralTrainer._learning_updates
docstring: Return updates in the training.
path: deepy/trainers/trainers.py
code:

    def _learning_updates(self):
        """
        Return updates in the training.
        """
        params = self.training_params()
        gradients = self.get_gradients(params)
        return self.optimization_updates(params, gradients)
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/trainers.py#L40-L46
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: GeneralNeuralTrainer.training_params
docstring: Get parameters to be optimized.
path: deepy/trainers/trainers.py
code:

    def training_params(self):
        """
        Get parameters to be optimized.
        """
        params = self.network.parameters
        # Freeze parameters
        if self.config.fixed_parameters:
            logging.info("fixed parameters: %s" % ", ".join(map(str, self.config.fixed_parameters)))
            params = [p for p in params if p not in self.config.fixed_parameters]
        return params
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/trainers.py#L48-L57
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: GeneralNeuralTrainer.optimization_updates
docstring: Return updates from optimization.
path: deepy/trainers/trainers.py
code:

    def optimization_updates(self, params, gradients):
        """
        Return updates from optimization.
        """
        updates, free_parameters = optimize_updates(params, gradients, self.config)
        self.network.free_parameters.extend(free_parameters)
        logging.info("Added %d free parameters for optimization" % len(free_parameters))
        return updates
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/trainers.py#L65-L72
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: GeneralNeuralTrainer.learning_function
docstring: Get the learning function. :param func: :return:
path: deepy/trainers/trainers.py
code:

    def learning_function(self):
        """
        Get the learning function.
        :param func:
        :return:
        """
        network_updates = list(self.network.updates) + list(self.network.training_updates)
        learning_updates = list(self._learning_updates())
        update_list = network_updates + learning_updates
        logging.info("network updates: %s" % " ".join(map(str, [x[0] for x in network_updates])))
        logging.info("learning updates: %s" % " ".join(map(str, [x[0] for x in learning_updates])))
        variables = self.network.input_variables + self.network.target_variables
        givens = None
        return theano.function(
            variables,
            map(lambda v: theano.Out(v, borrow=True), self.training_variables),
            updates=update_list,
            allow_input_downcast=True,
            mode=self.config.get("theano_mode", None),
            givens=givens)
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/trainers.py#L74-L94
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: AttentionLayer._glimpse_sensor
docstring: Parameters: x_t - 28x28 image l_p - 2x1 focus vector Returns: 4x12 matrix
path: examples/attention_models/baseline_model.py
code:

    def _glimpse_sensor(self, x_t, l_p):
        """
        Parameters:
            x_t - 28x28 image
            l_p - 2x1 focus vector
        Returns:
            4x12 matrix
        """
        # Turn l_p to the left-top point of rectangle
        l_p = l_p * 14 + 14 - 2
        l_p = T.cast(T.round(l_p), "int32")
        l_p = l_p * (l_p >= 0)
        l_p = l_p * (l_p < 24) + (l_p >= 24) * 23
        l_p2 = l_p - 2
        l_p2 = l_p2 * (l_p2 >= 0)
        l_p2 = l_p2 * (l_p2 < 20) + (l_p2 >= 20) * 19
        l_p3 = l_p - 6
        l_p3 = l_p3 * (l_p3 >= 0)
        l_p3 = l_p3 * (l_p3 < 16) + (l_p3 >= 16) * 15
        glimpse_1 = x_t[l_p[0]: l_p[0] + 4][:, l_p[1]: l_p[1] + 4]
        glimpse_2 = x_t[l_p2[0]: l_p2[0] + 8][:, l_p2[1]: l_p2[1] + 8]
        glimpse_2 = theano.tensor.signal.downsample.max_pool_2d(glimpse_2, (2,2))
        glimpse_3 = x_t[l_p3[0]: l_p3[0] + 16][:, l_p3[1]: l_p3[1] + 16]
        glimpse_3 = theano.tensor.signal.downsample.max_pool_2d(glimpse_3, (4,4))
        return T.concatenate([glimpse_1, glimpse_2, glimpse_3])
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/examples/attention_models/baseline_model.py#L37-L62
sha: 090fbad22a08a809b12951cd0d4984f5bd432698
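
The coordinate handling above maps a focus vector in roughly [-1, 1] to integer pixel coordinates and clamps it with branchless arithmetic instead of T.clip. A small numpy illustration of the same trick, with plain arrays standing in for the symbolic tensors:

```python
# Hedged sketch: the branchless clamp used in _glimpse_sensor, in plain numpy.
import numpy as np

l_p = np.array([-0.9, 0.8])                  # focus vector in [-1, 1]
l_p = l_p * 14 + 14 - 2                      # to pixel space, offset for a 4x4 patch
l_p = np.round(l_p).astype("int32")
l_p = l_p * (l_p >= 0)                       # v * (v >= 0) zeroes negatives
l_p = l_p * (l_p < 24) + (l_p >= 24) * 23    # cap at 23 without branching
print(l_p)                                   # both coordinates now lie in [0, 23]
```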

partition: test
func_name: AttentionLayer._refined_glimpse_sensor
docstring: Parameters: x_t - 28x28 image l_p - 2x1 focus vector Returns: 7*14 matrix
path: examples/attention_models/baseline_model.py
code:

    def _refined_glimpse_sensor(self, x_t, l_p):
        """
        Parameters:
            x_t - 28x28 image
            l_p - 2x1 focus vector
        Returns:
            7*14 matrix
        """
        # Turn l_p to the left-top point of rectangle
        l_p = l_p * 14 + 14 - 4
        l_p = T.cast(T.round(l_p), "int32")
        l_p = l_p * (l_p >= 0)
        l_p = l_p * (l_p < 21) + (l_p >= 21) * 20
        glimpse_1 = x_t[l_p[0]: l_p[0] + 7][:, l_p[1]: l_p[1] + 7]
        # glimpse_2 = theano.tensor.signal.downsample.max_pool_2d(x_t, (4,4))
        # return T.concatenate([glimpse_1, glimpse_2])
        return glimpse_1
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/examples/attention_models/baseline_model.py#L64-L81
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: AttentionLayer._glimpse_network
docstring: Parameters: x_t - 28x28 image l_p - 2x1 focus vector Returns: 4x12 matrix
path: examples/attention_models/baseline_model.py
code:

    def _glimpse_network(self, x_t, l_p):
        """
        Parameters:
            x_t - 28x28 image
            l_p - 2x1 focus vector
        Returns:
            4x12 matrix
        """
        sensor_output = self._refined_glimpse_sensor(x_t, l_p)
        sensor_output = T.flatten(sensor_output)
        h_g = self._relu(T.dot(sensor_output, self.W_g0))
        h_l = self._relu(T.dot(l_p, self.W_g1))
        g = self._relu(T.dot(h_g, self.W_g2_hg) + T.dot(h_l, self.W_g2_hl))
        return g
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/examples/attention_models/baseline_model.py#L88-L101
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: AttentionLayer._action_network
docstring: Parameters: h_t - 256x1 vector Returns: 10x1 vector
path: examples/attention_models/baseline_model.py
code:

    def _action_network(self, h_t):
        """
        Parameters:
            h_t - 256x1 vector
        Returns:
            10x1 vector
        """
        z = self._relu(T.dot(h_t, self.W_a) + self.B_a)
        return self._softmax(z)
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/examples/attention_models/baseline_model.py#L112-L120
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: get_network
docstring: Get baseline model. Parameters: model - model path Returns: network
path: examples/attention_models/first_glimpse_model.py
code:

    def get_network(model=None, std=0.005, disable_reinforce=False, random_glimpse=False):
        """
        Get baseline model.
        Parameters:
            model - model path
        Returns:
            network
        """
        network = NeuralClassifier(input_dim=28 * 28)
        network.stack_layer(FirstGlimpseLayer(std=std, disable_reinforce=disable_reinforce,
                                              random_glimpse=random_glimpse))
        if model and os.path.exists(model):
            network.load_params(model)
        return network
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/examples/attention_models/first_glimpse_model.py#L192-L204
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: FirstGlimpseLayer._first_glimpse_sensor
docstring: Compute first glimpse position using down-sampled image.
path: examples/attention_models/first_glimpse_model.py
code:

    def _first_glimpse_sensor(self, x_t):
        """
        Compute first glimpse position using down-sampled image.
        """
        downsampled_img = theano.tensor.signal.downsample.max_pool_2d(x_t, (4,4))
        downsampled_img = downsampled_img.flatten()
        first_l = T.dot(downsampled_img, self.W_f)
        if self.disable_reinforce:
            wf_grad = self.W_f
            if self.random_glimpse:
                first_l = self.srng.uniform((2,), low=-1.7, high=1.7)
        else:
            sampled_l_t = self._sample_gaussian(first_l, self.cov)
            sampled_pdf = self._multi_gaussian_pdf(disconnected_grad(sampled_l_t), first_l)
            wf_grad = T.grad(T.log(sampled_pdf), self.W_f)
            first_l = sampled_l_t
        return first_l, wf_grad
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/examples/attention_models/first_glimpse_model.py#L38-L54
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: FirstGlimpseLayer._core_network
docstring: Parameters: x_t - 28x28 image l_p - 2x1 focus vector h_p - 256x1 vector Returns: h_t, 256x1 vector
path: examples/attention_models/first_glimpse_model.py
code:

    def _core_network(self, l_p, h_p, x_t):
        """
        Parameters:
            x_t - 28x28 image
            l_p - 2x1 focus vector
            h_p - 256x1 vector
        Returns:
            h_t, 256x1 vector
        """
        g_t = self._glimpse_network(x_t, l_p)
        h_t = self._tanh(T.dot(g_t, self.W_h_g) + T.dot(h_p, self.W_h) + self.B_h)
        l_t = self._location_network(h_t)

        if not self.disable_reinforce:
            sampled_l_t = self._sample_gaussian(l_t, self.cov)
            sampled_pdf = self._multi_gaussian_pdf(disconnected_grad(sampled_l_t), l_t)
            wl_grad = T.grad(T.log(sampled_pdf), self.W_l)
        else:
            sampled_l_t = l_t
            wl_grad = self.W_l

        if self.random_glimpse and self.disable_reinforce:
            sampled_l_t = self.srng.uniform((2,), low=-1.7, high=1.7)

        a_t = self._action_network(h_t)

        return sampled_l_t, h_t, a_t, wl_grad
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/examples/attention_models/first_glimpse_model.py#L107-L133
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: MyJointTrainingModel.prepare
docstring: All codes that create parameters should be put into 'setup' function.
path: examples/tutorials/tutorial2.py
code:

    def prepare(self):
        """
        All codes that create parameters should be put into 'setup' function.
        """
        self.output_dim = 10
        self.encoder = Chain(self.input_dim).stack(Dense(self.internal_layer_size, 'tanh'))
        self.decoder = Chain(self.internal_layer_size).stack(Dense(self.input_dim))
        self.classifier = Chain(self.internal_layer_size).stack(Dense(50, 'tanh'),
                                                                Dense(self.output_dim),
                                                                Softmax())
        self.register_inner_layers(self.encoder, self.decoder, self.classifier)
        self.target_input = T.ivector('target')
        self.register_external_inputs(self.target_input)
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/examples/tutorials/tutorial2.py#L27-L41
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: MyJointTrainingModel.compute_tensor
docstring: Build the computation graph here.
path: examples/tutorials/tutorial2.py
code:

    def compute_tensor(self, x):
        """
        Build the computation graph here.
        """
        internal_variable = self.encoder.compute_tensor(x)
        decoding_output = self.decoder.compute_tensor(internal_variable)
        classification_output = self.classifier.compute_tensor(internal_variable)

        auto_encoder_cost = AutoEncoderCost(decoding_output, x).get()
        classification_cost = CrossEntropyCost(classification_output, self.target_input).get()
        final_cost = 0.01 * auto_encoder_cost + classification_cost

        error_rate = ErrorRateCost(classification_output, self.target_input).get()
        self.register_monitors(("err", error_rate),
                               ("encoder_cost", auto_encoder_cost),
                               ("classify_cost", classification_cost))
        return final_cost
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/examples/tutorials/tutorial2.py#L43-L65
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: BasicDataset.map
docstring: Process all data with given function. The scheme of function should be x,y -> x,y.
path: deepy/dataset/basic.py
code:

    def map(self, func):
        """
        Process all data with given function.
        The scheme of function should be x,y -> x,y.
        """
        if self._train_set:
            self._train_set = map(func, self._train_set)
        if self._valid_set:
            self._valid_set = map(func, self._valid_set)
        if self._test_set:
            self._test_set = map(func, self._test_set)
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/dataset/basic.py#L30-L40
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: BasicDataset.vectorize_target
docstring: Make targets be one-hot vectors.
path: deepy/dataset/basic.py
code:

    def vectorize_target(self, size):
        """
        Make targets be one-hot vectors.
        """
        if self._train_set:
            self._train_set = self._vectorize_set(self._train_set, size)
        if self._valid_set:
            self._valid_set = self._vectorize_set(self._valid_set, size)
        if self._test_set:
            self._test_set = self._vectorize_set(self._test_set, size)
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/dataset/basic.py#L51-L60
sha: 090fbad22a08a809b12951cd0d4984f5bd432698
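
A small numpy illustration of what "make targets one-hot vectors" means; _vectorize_set itself is internal to deepy, so this only mirrors its effect:

```python
# Hedged sketch: one-hot vectorization of integer targets, in plain numpy.
import numpy as np

def onehot(size, index):
    v = np.zeros(size, dtype="float32")
    v[index] = 1.0
    return v

targets = [3, 0, 9]
vectorized = [onehot(10, t) for t in targets]
print(vectorized[0])    # [0. 0. 0. 1. 0. 0. 0. 0. 0. 0.]
```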

partition: test
func_name: BasicDataset.report
docstring: Print dataset statistics.
path: deepy/dataset/basic.py
code:

    def report(self):
        """
        Print dataset statistics.
        """
        logging.info("%s train=%d valid=%d test=%d" % (self.__class__.__name__,
                                                       len(list(self._train_set)) if self._train_set else 0,
                                                       len(list(self._valid_set)) if self._valid_set else 0,
                                                       len(list(self._test_set)) if self._test_set else 0))
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/dataset/basic.py#L62-L69
sha: 090fbad22a08a809b12951cd0d4984f5bd432698

partition: test
func_name: CustomizeTrainer.train
docstring: We train over mini-batches and evaluate periodically.
path: deepy/trainers/customize_trainer.py
code:

    def train(self, train_set, valid_set=None, test_set=None, train_size=None):
        '''We train over mini-batches and evaluate periodically.'''
        iteration = 0
        while True:
            if not iteration % self.config.test_frequency and test_set:
                try:
                    self.test(iteration, test_set)
                except KeyboardInterrupt:
                    logging.info('interrupted!')
                    break

            if not iteration % self.validation_frequency and valid_set:
                try:
                    if not self.evaluate(iteration, valid_set):
                        logging.info('patience elapsed, bailing out')
                        break
                except KeyboardInterrupt:
                    logging.info('interrupted!')
                    break

            train_message = ""
            try:
                train_message = self.train_func(train_set)
            except KeyboardInterrupt:
                logging.info('interrupted!')
                break

            if not iteration % self.config.monitor_frequency:
                logging.info('monitor (iter=%i) %s', iteration + 1, train_message)
            iteration += 1

            if hasattr(self.network, "iteration_callback"):
                self.network.iteration_callback()

            yield train_message

        if valid_set:
            self.set_params(self.best_params)
        if test_set:
            self.test(0, test_set)
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/customize_trainer.py#L28-L66
sha: 090fbad22a08a809b12951cd0d4984f5bd432698
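
Because train() yields after every mini-batch, callers drive training by iterating it. A self-contained sketch of that pattern, with a stub generator standing in for a real trainer:

```python
# Hedged sketch: train() above is a generator, so a caller loops over it.
def train(train_set):
    for batch_cost in train_set:
        yield "J=%.2f" % batch_cost    # stands in for the train_func message

for iteration, message in enumerate(train([0.9, 0.5, 0.2])):
    print("iter %d: %s" % (iteration, message))
```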

partition: test
func_name: NeuralLM.sample
docstring: Sample outputs from LM.
path: examples/lm/lm.py
code:

    def sample(self, input, steps):
        """
        Sample outputs from LM.
        """
        inputs = [[onehot(self.input_dim, x) for x in input]]
        for _ in range(steps):
            target = self.compute(inputs)[0,-1].argmax()
            input.append(target)
            inputs[0].append(onehot(self.input_dim, target))
        return input
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/examples/lm/lm.py#L60-L69
sha: 090fbad22a08a809b12951cd0d4984f5bd432698
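
The loop above is greedy decoding: at each step the model scores the whole prefix, the argmax of the last time step is appended, and the one-hot input grows by one. A self-contained numpy sketch with a stub model in place of self.compute:

```python
# Hedged sketch: the greedy decoding loop in sample(), with a stub "LM" whose
# scores have shape (batch, time, vocab), matching the [0, -1] indexing above.
import numpy as np

def onehot(size, index):
    v = np.zeros(size, dtype="float32")
    v[index] = 1.0
    return v

def compute(inputs):                     # stub model: always favors last token + 1
    last = np.argmax(inputs[0][-1])
    scores = np.zeros((1, len(inputs[0]), 10), dtype="float32")
    scores[0, -1, (last + 1) % 10] = 1.0
    return scores

sequence = [2]
inputs = [[onehot(10, x) for x in sequence]]
for _ in range(3):
    target = compute(inputs)[0, -1].argmax()
    sequence.append(int(target))
    inputs[0].append(onehot(10, target))
print(sequence)                          # [2, 3, 4, 5]
```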

partition: test
func_name: ClassOutputLayer.compute_tensor
docstring: :param x: (batch, time, vec)
path: examples/lm/layers.py
code:

    def compute_tensor(self, x):
        """
        :param x: (batch, time, vec)
        """
        # Target class
        class_matrix = self.target_tensor // self.output_size
        class_vector = class_matrix.reshape((-1,))
        # Target index
        target_matrix = self.target_tensor % self.output_size
        target_vector = target_matrix.reshape((-1,))
        # Input matrix
        input_matrix = x.reshape((-1, self.input_dim))
        # Output matrix
        output_tensor3d = self.output_layer.compute_tensor(x)
        output_matrix = output_tensor3d.reshape((-1, self.class_size, self.output_size))
        arange_vec = self.arange_cache[:output_matrix.shape[0]]
        sub_output_matrix = output_matrix[arange_vec, class_vector]
        # Softmax
        softmax_output_matrix = self.softmax_layer.compute_tensor(sub_output_matrix)
        # Class prediction
        class_output_matrix = self.class_layer.compute_tensor(x)
        # Costs
        output_cost = LMCost(softmax_output_matrix, target_vector).get()
        class_cost = LMCost(class_output_matrix, class_matrix).get()
        final_cost = output_cost + class_cost
        return final_cost
repo: zomux/deepy
language: python
url: https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/examples/lm/layers.py#L49-L75
sha: 090fbad22a08a809b12951cd0d4984f5bd432698
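
The layer above factorizes a large softmax: a vocabulary id is split into a class id and a within-class index with // and %, and two smaller softmax costs are summed. The index arithmetic in plain numpy, where the class width of 100 is an illustrative value:

```python
# Hedged sketch: class/word factorization of vocabulary ids, in plain numpy.
import numpy as np

output_size = 100                          # words per class (illustrative)
targets = np.array([4231, 57, 9900])
class_ids = targets // output_size         # -> [42,  0, 99]
word_ids = targets % output_size           # -> [31, 57,  0]
# The split is lossless, which is what lets the two costs be trained jointly:
assert (class_ids * output_size + word_ids == targets).all()
```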

partition: test
func_name: Attention.compute_alignments
docstring: Compute the alignment weights based on the previous state.
path: deepy/layers/attention.py
code:

    def compute_alignments(self, prev_state, precomputed_values, mask=None):
        """
        Compute the alignment weights based on the previous state.
        """
        WaSp = T.dot(prev_state, self.Wa)
        UaH = precomputed_values
        # For test time the UaH will be (time, output_dim)
        if UaH.ndim == 2:
            preact = WaSp[:, None, :] + UaH[None, :, :]
        else:
            preact = WaSp[:, None, :] + UaH
        act = T.activate(preact, 'tanh')
        align_scores = T.dot(act, self.Va)  # ~ (batch, time)
        if mask:
            mask = (1 - mask) * -99.00
            if align_scores.ndim == 3:
                align_scores += mask[None, :]
            else:
                align_scores += mask
        align_weights = T.nnet.softmax(align_scores)
        return align_weights
[ "Compute", "the", "alignment", "weights", "based", "on", "the", "previous", "state", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/attention.py#L29-L50
[ "def", "compute_alignments", "(", "self", ",", "prev_state", ",", "precomputed_values", ",", "mask", "=", "None", ")", ":", "WaSp", "=", "T", ".", "dot", "(", "prev_state", ",", "self", ".", "Wa", ")", "UaH", "=", "precomputed_values", "# For test time the UaH will be (time, output_dim)", "if", "UaH", ".", "ndim", "==", "2", ":", "preact", "=", "WaSp", "[", ":", ",", "None", ",", ":", "]", "+", "UaH", "[", "None", ",", ":", ",", ":", "]", "else", ":", "preact", "=", "WaSp", "[", ":", ",", "None", ",", ":", "]", "+", "UaH", "act", "=", "T", ".", "activate", "(", "preact", ",", "'tanh'", ")", "align_scores", "=", "T", ".", "dot", "(", "act", ",", "self", ".", "Va", ")", "# ~ (batch, time)", "if", "mask", ":", "mask", "=", "(", "1", "-", "mask", ")", "*", "-", "99.00", "if", "align_scores", ".", "ndim", "==", "3", ":", "align_scores", "+=", "mask", "[", "None", ",", ":", "]", "else", ":", "align_scores", "+=", "mask", "align_weights", "=", "T", ".", "nnet", ".", "softmax", "(", "align_scores", ")", "return", "align_weights" ]
090fbad22a08a809b12951cd0d4984f5bd432698
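For reference, the additive (Bahdanau-style) scoring used above can be sketched in plain NumPy. The names Wa, Ua, Va mirror the layer's parameters, but the shapes and random values here are illustrative assumptions:

import numpy as np

batch, time, state_dim, hidden = 2, 4, 8, 8
rng = np.random.RandomState(0)
prev_state = rng.rand(batch, state_dim)
inputs = rng.rand(batch, time, state_dim)
Wa, Ua, Va = rng.rand(state_dim, hidden), rng.rand(state_dim, hidden), rng.rand(hidden)

UaH = np.dot(inputs, Ua)                           # precomputed values
preact = np.dot(prev_state, Wa)[:, None, :] + UaH  # broadcast over time
scores = np.dot(np.tanh(preact), Va)               # ~ (batch, time)
weights = np.exp(scores) / np.exp(scores).sum(axis=1, keepdims=True)
assert np.allclose(weights.sum(axis=1), 1.0)       # one distribution per batch row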
test
Attention.compute_context_vector
Compute the context vector with soft attention.
deepy/layers/attention.py
def compute_context_vector(self, prev_state, inputs, precomputed_values=None, mask=None):
    """
    Compute the context vector with soft attention.
    """
    # Check against None explicitly; calling bool() on a symbolic tensor raises
    if precomputed_values is None:
        precomputed_values = self.precompute(inputs)
    align_weights = self.compute_alignments(prev_state, precomputed_values, mask)
    context_vector = T.sum(align_weights[:, :, None] * inputs, axis=1)
    return context_vector
[ "Compute", "the", "context", "vector", "with", "soft", "attention", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/attention.py#L52-L59
[ "def", "compute_context_vector", "(", "self", ",", "prev_state", ",", "inputs", ",", "precomputed_values", "=", "None", ",", "mask", "=", "None", ")", ":", "precomputed_values", "=", "precomputed_values", "if", "precomputed_values", "else", "self", ".", "precompute", "(", "inputs", ")", "align_weights", "=", "self", ".", "compute_alignments", "(", "prev_state", ",", "precomputed_values", ",", "mask", ")", "context_vector", "=", "T", ".", "sum", "(", "align_weights", "[", ":", ",", ":", ",", "None", "]", "*", "inputs", ",", "axis", "=", "1", ")", "return", "context_vector" ]
090fbad22a08a809b12951cd0d4984f5bd432698
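The context vector is simply the alignment-weighted sum of the inputs over the time axis. A self-contained NumPy sketch with assumed shapes:

import numpy as np

batch, time, dim = 2, 4, 8
rng = np.random.RandomState(1)
inputs = rng.rand(batch, time, dim)
weights = rng.rand(batch, time)
weights /= weights.sum(axis=1, keepdims=True)           # alignment weights sum to 1
context = np.sum(weights[:, :, None] * inputs, axis=1)  # (batch, dim)
print(context.shape)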
test
MultiGPUTrainer.train
Train the model in a multi-GPU environment.
deepy/multigpu/worker.py
def train(self, train_set, valid_set=None, test_set=None, train_size=None):
    """
    Train the model in a multi-GPU environment.
    """
    from platoon.channel import Worker
    from platoon.param_sync import EASGD, ASGD
    server_port = self._port
    param_map = self.create_param_map()
    # Initialize the worker
    worker = Worker(control_port=server_port)
    if self.config.learning_rate:
        worker.send_req({'init_schedule': self._schedule_params})
    self.sync_hyperparams(worker.send_req('sync_hyperparams')['sync_hyperparams'])
    easgd_alpha = worker.send_req('get_easgd_alpha')
    if self._using_easgd:
        self.logger.info("using EASGD with alpha={}".format(easgd_alpha))
    else:
        self.logger.info("using ASGD rule")
    rule = EASGD(easgd_alpha) if self._using_easgd else ASGD()
    worker.init_shared_params(param_map.values(), param_sync_rule=rule)
    worker.send_req({
        "set_names": None,
        "training_names": self.training_names,
        "evaluation_names": self.evaluation_names
    })
    # Load all training batches; this consumes vast memory
    self.logger.info("started process {}".format(os.getpid()))
    self.logger.info("(proc {}) load training data".format(os.getpid()))
    train_batches = list(train_set)
    network_callback = bool(self.network.training_callbacks)
    trainer_callback = bool(self._iter_controllers)
    # Start with validation, so the performance when a worker joins is known
    worker.copy_to_local()
    if valid_set:
        self._run_valid(self.epoch, valid_set, dry_run=True)
        self.fix_costs()
    worker.send_req({
        "valid_done": None,
        "valid_costs": self.last_run_costs,
        "auto_save": self.config.auto_save
    })
    worker.copy_to_local()
    # Begin the loop
    while True:
        resp = worker.send_req('next')
        if resp == 'stop':
            break
        elif resp == 'wait':
            time.sleep(1)
        elif resp == 'get_num_batches':
            worker.send_req({'get_num_batches_done': len(train_batches)})
        elif 'eval' in resp:
            self.best_cost = resp['best_valid_cost']
            worker.copy_to_local()
            valid_costs = None
            test_costs = None
            if valid_set:
                self._run_valid(self.epoch, valid_set)
                self.fix_costs()
                valid_costs = self.last_run_costs
            if test_set:
                self._run_test(self.epoch, test_set)
                self.fix_costs()
                test_costs = self.last_run_costs
            worker.send_req({
                "eval_done": None,
                "valid_costs": valid_costs,
                "test_costs": test_costs,
                "auto_save": self.config.auto_save
            })
        elif 'valid' in resp:
            self.best_cost = resp['best_valid_cost']
            worker.copy_to_local()
            if valid_set:
                self._run_valid(self.epoch, valid_set, dry_run=True)
                self.fix_costs()
            worker.send_req({
                "valid_done": None,
                "valid_costs": self.last_run_costs,
                "auto_save": self.config.auto_save
            })
        elif 'train' in resp:
            batch_ids = resp['train']
            batch_costs = [[] for _ in self.training_names]
            for batch_id in batch_ids:
                x = train_batches[batch_id]
                cost_x = self.learn(*x)
                for i, cost in enumerate(cost_x):
                    batch_costs[i].append(cost)
                self.last_cost = cost_x[0]
                if network_callback:
                    self.network.training_callback()
                if trainer_callback:
                    for func in self._iter_controllers:
                        func(self)
            worker.sync_params(synchronous=True)
            worker.send_req({'train_done': None,
                             'costs': [float(np.mean(c)) for c in batch_costs]})
        elif 'sync_hyperparams' in resp:
            self.sync_hyperparams(resp['sync_hyperparams'])
    worker.close()
    return []
[ "Train", "the", "model", "in", "multi", "-", "GPU", "environment", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/multigpu/worker.py#L59-L159
[ "def", "train", "(", "self", ",", "train_set", ",", "valid_set", "=", "None", ",", "test_set", "=", "None", ",", "train_size", "=", "None", ")", ":", "from", "platoon", ".", "channel", "import", "Worker", "from", "platoon", ".", "param_sync", "import", "EASGD", ",", "ASGD", "server_port", "=", "self", ".", "_port", "param_map", "=", "self", ".", "create_param_map", "(", ")", "# Initialize the worker", "worker", "=", "Worker", "(", "control_port", "=", "server_port", ")", "if", "self", ".", "config", ".", "learning_rate", ":", "worker", ".", "send_req", "(", "{", "'init_schedule'", ":", "self", ".", "_schedule_params", "}", ")", "self", ".", "sync_hyperparams", "(", "worker", ".", "send_req", "(", "'sync_hyperparams'", ")", "[", "'sync_hyperparams'", "]", ")", "easgd_alpha", "=", "worker", ".", "send_req", "(", "'get_easgd_alpha'", ")", "if", "self", ".", "_using_easgd", ":", "self", ".", "logger", ".", "info", "(", "\"using EASGD with alpha={}\"", ".", "format", "(", "easgd_alpha", ")", ")", "else", ":", "self", ".", "logger", ".", "info", "(", "\"using ASGD rule\"", ")", "rule", "=", "EASGD", "(", "easgd_alpha", ")", "if", "self", ".", "_using_easgd", "else", "ASGD", "(", ")", "worker", ".", "init_shared_params", "(", "param_map", ".", "values", "(", ")", ",", "param_sync_rule", "=", "rule", ")", "worker", ".", "send_req", "(", "{", "\"set_names\"", ":", "None", ",", "\"training_names\"", ":", "self", ".", "training_names", ",", "\"evaluation_names\"", ":", "self", ".", "evaluation_names", "}", ")", "# Load all training batches, consume vast memory here", "self", ".", "logger", ".", "info", "(", "\"started process {}\"", ".", "format", "(", "os", ".", "getpid", "(", ")", ")", ")", "self", ".", "logger", ".", "info", "(", "\"(proc {}) load training data\"", ".", "format", "(", "os", ".", "getpid", "(", ")", ")", ")", "train_batches", "=", "list", "(", "train_set", ")", "network_callback", "=", "bool", "(", "self", ".", "network", ".", "training_callbacks", ")", "trainer_callback", "=", "bool", "(", "self", ".", "_iter_controllers", ")", "# Start from valid, so the performance when a worked join can be known", "worker", ".", "copy_to_local", "(", ")", "if", "valid_set", ":", "self", ".", "_run_valid", "(", "self", ".", "epoch", ",", "valid_set", ",", "dry_run", "=", "True", ")", "self", ".", "fix_costs", "(", ")", "worker", ".", "send_req", "(", "{", "\"valid_done\"", ":", "None", ",", "\"valid_costs\"", ":", "self", ".", "last_run_costs", ",", "\"auto_save\"", ":", "self", ".", "config", ".", "auto_save", "}", ")", "worker", ".", "copy_to_local", "(", ")", "# Begin the loop", "while", "True", ":", "resp", "=", "worker", ".", "send_req", "(", "'next'", ")", "if", "resp", "==", "'stop'", ":", "break", "elif", "resp", "==", "'wait'", ":", "time", ".", "sleep", "(", "1", ")", "elif", "resp", "==", "'get_num_batches'", ":", "worker", ".", "send_req", "(", "{", "'get_num_batches_done'", ":", "len", "(", "train_batches", ")", "}", ")", "elif", "'eval'", "in", "resp", ":", "self", ".", "best_cost", "=", "resp", "[", "'best_valid_cost'", "]", "worker", ".", "copy_to_local", "(", ")", "valid_costs", "=", "None", "test_costs", "=", "None", "if", "valid_set", ":", "self", ".", "_run_valid", "(", "self", ".", "epoch", ",", "valid_set", ")", "self", ".", "fix_costs", "(", ")", "valid_costs", "=", "self", ".", "last_run_costs", "if", "test_set", ":", "self", ".", "_run_test", "(", "self", ".", "epoch", ",", "test_set", ")", "self", ".", "fix_costs", "(", ")", "test_costs", "=", "self", ".", 
"last_run_costs", "worker", ".", "send_req", "(", "{", "\"eval_done\"", ":", "None", ",", "\"valid_costs\"", ":", "valid_costs", ",", "\"test_costs\"", ":", "test_costs", ",", "\"auto_save\"", ":", "self", ".", "config", ".", "auto_save", "}", ")", "elif", "'valid'", "in", "resp", ":", "self", ".", "best_cost", "=", "resp", "[", "'best_valid_cost'", "]", "worker", ".", "copy_to_local", "(", ")", "if", "valid_set", ":", "self", ".", "_run_valid", "(", "self", ".", "epoch", ",", "valid_set", ",", "dry_run", "=", "True", ")", "self", ".", "fix_costs", "(", ")", "worker", ".", "send_req", "(", "{", "\"valid_done\"", ":", "None", ",", "\"valid_costs\"", ":", "self", ".", "last_run_costs", ",", "\"auto_save\"", ":", "self", ".", "config", ".", "auto_save", "}", ")", "elif", "'train'", "in", "resp", ":", "batch_ids", "=", "resp", "[", "'train'", "]", "batch_costs", "=", "[", "[", "]", "for", "_", "in", "self", ".", "training_names", "]", "for", "batch_id", "in", "batch_ids", ":", "x", "=", "train_batches", "[", "batch_id", "]", "cost_x", "=", "self", ".", "learn", "(", "*", "x", ")", "for", "i", ",", "cost", "in", "enumerate", "(", "cost_x", ")", ":", "batch_costs", "[", "i", "]", ".", "append", "(", "cost", ")", "self", ".", "last_cost", "=", "cost_x", "[", "0", "]", "if", "network_callback", ":", "self", ".", "network", ".", "training_callback", "(", ")", "if", "trainer_callback", ":", "for", "func", "in", "self", ".", "_iter_controllers", ":", "func", "(", "self", ")", "worker", ".", "sync_params", "(", "synchronous", "=", "True", ")", "worker", ".", "send_req", "(", "{", "'train_done'", ":", "None", ",", "'costs'", ":", "[", "float", "(", "np", ".", "mean", "(", "c", ")", ")", "for", "c", "in", "batch_costs", "]", "}", ")", "elif", "'sync_hyperparams'", "in", "resp", ":", "self", ".", "sync_hyperparams", "(", "resp", "[", "'sync_hyperparams'", "]", ")", "worker", ".", "close", "(", ")", "return", "[", "]" ]
090fbad22a08a809b12951cd0d4984f5bd432698
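The worker side above is one half of a simple request/response protocol: the worker repeatedly asks the server what to do next ('train', 'valid', 'eval', 'wait', 'stop') and reports results back. A stripped-down sketch of that loop, where send_req is an assumed stand-in for Platoon's Worker.send_req and learn is a hypothetical training function:

import time

def worker_loop(send_req, learn, train_batches):
    # Minimal sketch of the control flow; not the repo's actual code.
    while True:
        resp = send_req('next')
        if resp == 'stop':
            break
        elif resp == 'wait':
            time.sleep(1)
        elif resp == 'get_num_batches':
            send_req({'get_num_batches_done': len(train_batches)})
        elif 'train' in resp:
            costs = [learn(*train_batches[i]) for i in resp['train']]
            send_req({'train_done': None, 'costs': costs})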
test
concatenate
A utility function for concatenation.
deepy/tensor/functions.py
def concatenate(vars, axis=-1):
    """
    A utility function for concatenation.
    """
    from deepy.core.neural_var import NeuralVariable
    if isinstance(vars[0], NeuralVariable):
        concat_var = Concatenate(axis=axis).compute(*vars)
        if axis == -1 or axis == vars[0].tensor.ndim - 1:
            concat_var.output_dim = sum([x.output_dim for x in vars], 0)
    else:
        concat_var = TT.concatenate(vars, axis)
    return concat_var
[ "A", "utility", "function", "of", "concatenate", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/tensor/functions.py#L38-L49
[ "def", "concatenate", "(", "vars", ",", "axis", "=", "-", "1", ")", ":", "from", "deepy", ".", "core", ".", "neural_var", "import", "NeuralVariable", "if", "isinstance", "(", "vars", "[", "0", "]", ",", "NeuralVariable", ")", ":", "concat_var", "=", "Concatenate", "(", "axis", "=", "axis", ")", ".", "compute", "(", "*", "vars", ")", "if", "axis", "==", "-", "1", "or", "axis", "==", "vars", "[", "0", "]", ".", "tensor", ".", "ndim", "-", "1", ":", "concat_var", ".", "output_dim", "=", "sum", "(", "[", "x", ".", "output_dim", "for", "x", "in", "vars", "]", ",", "0", ")", "else", ":", "concat_var", "=", "TT", ".", "concatenate", "(", "vars", ",", "axis", ")", "return", "concat_var" ]
090fbad22a08a809b12951cd0d4984f5bd432698
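The output_dim bookkeeping above mirrors what happens to array shapes: concatenating along the last axis sums the final dimensions. A NumPy illustration of that rule (shapes are arbitrary examples):

import numpy as np

a = np.zeros((3, 100))
b = np.zeros((3, 50))
c = np.concatenate([a, b], axis=-1)
print(c.shape)  # (3, 150): last-axis concat sums the final dims,
                # which is exactly the output_dim rule in concatenate()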
test
var
Wrap a Theano tensor into a variable for defining a neural network. :param last_dim: last dimension of tensor, 0 indicates that the last dimension is flexible :rtype: deepy.core.neural_var.NeuralVariable
deepy/tensor/functions.py
def var(tensor_type, last_dim=0, test_shape=None):
    """
    Wrap a Theano tensor into a variable for defining a neural network.
    :param last_dim: last dimension of tensor, 0 indicates that the last dimension is flexible
    :rtype: deepy.core.neural_var.NeuralVariable
    """
    # Create tensor
    from deepy.core.neural_var import NeuralVariable
    from deepy.core.env import env
    from theano.tensor.var import TensorVariable
    if isinstance(tensor_type, NeuralVariable):
        var = tensor_type
        if last_dim != 0:
            var.output_dim = last_dim
    elif isinstance(tensor_type, TensorVariable):
        var = NeuralVariable(tensor_type, dim=last_dim)
    elif isinstance(tensor_type, str):
        theano_tensor = getattr(TT, tensor_type)()
        var = NeuralVariable(theano_tensor, dim=last_dim)
    else:
        raise Exception("tensor_type shall be a string or a NeuralVariable")
    # Set test value
    if test_shape:
        if type(test_shape) != list and type(test_shape) != tuple:
            # Maybe it's a value
            var.set_test_value(test_shape)
        else:
            test_val = env.numpy_rand.rand(*test_shape)
            if len(test_shape) > 0:
                test_val = test_val.astype(var.tensor.dtype)
            elif var.tensor.dtype.startswith("int"):
                test_val = 1
            var.set_test_value(test_val)
    else:
        # Create a general test_shape
        dims = [(d + 1) * 3 for d in range(var.tensor.ndim)]
        if var.dim() != 0:
            dims[-1] = var.dim()
        test_val = env.numpy_rand.rand(*dims)
        if len(dims) > 0:
            test_val = test_val.astype(var.tensor.dtype)
        elif var.tensor.dtype.startswith("int"):
            test_val = 1
        var.set_test_value(test_val)
    return var
[ "Wrap", "a", "Theano", "tensor", "into", "the", "variable", "for", "defining", "neural", "network", ".", ":", "param", "last_dim", ":", "last", "dimension", "of", "tensor", "0", "indicates", "that", "the", "last", "dimension", "is", "flexible", ":", "rtype", ":", "deepy", ".", "core", ".", "neural_var", ".", "NeuralVariable" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/tensor/functions.py#L72-L116
[ "def", "var", "(", "tensor_type", ",", "last_dim", "=", "0", ",", "test_shape", "=", "None", ")", ":", "# Create tensor", "from", "deepy", ".", "core", ".", "neural_var", "import", "NeuralVariable", "from", "deepy", ".", "core", ".", "env", "import", "env", "from", "theano", ".", "tensor", ".", "var", "import", "TensorVariable", "if", "isinstance", "(", "tensor_type", ",", "NeuralVariable", ")", ":", "var", "=", "tensor_type", "if", "last_dim", "!=", "0", ":", "var", ".", "output_dim", "=", "last_dim", "elif", "isinstance", "(", "tensor_type", ",", "TensorVariable", ")", ":", "var", "=", "NeuralVariable", "(", "tensor_type", ",", "dim", "=", "last_dim", ")", "elif", "isinstance", "(", "tensor_type", ",", "str", ")", ":", "theano_tensor", "=", "getattr", "(", "TT", ",", "tensor_type", ")", "(", ")", "var", "=", "NeuralVariable", "(", "theano_tensor", ",", "dim", "=", "last_dim", ")", "else", ":", "raise", "Exception", "(", "\"tensor_type shall be a string or a NeuralVariable\"", ")", "# Set test value", "if", "test_shape", ":", "if", "type", "(", "test_shape", ")", "!=", "list", "and", "type", "(", "test_shape", ")", "!=", "tuple", ":", "# May be it's a value", "var", ".", "set_test_value", "(", "test_shape", ")", "else", ":", "test_val", "=", "env", ".", "numpy_rand", ".", "rand", "(", "*", "test_shape", ")", "if", "len", "(", "test_shape", ")", ">", "0", ":", "test_val", "=", "test_val", ".", "astype", "(", "var", ".", "tensor", ".", "dtype", ")", "elif", "var", ".", "tensor", ".", "dtype", ".", "startswith", "(", "\"int\"", ")", ":", "test_val", "=", "1", "var", ".", "set_test_value", "(", "test_val", ")", "else", ":", "# Create a general test_shape", "dims", "=", "[", "(", "d", "+", "1", ")", "*", "3", "for", "d", "in", "range", "(", "var", ".", "tensor", ".", "ndim", ")", "]", "if", "var", ".", "dim", "(", ")", "!=", "0", ":", "dims", "[", "-", "1", "]", "=", "var", ".", "dim", "(", ")", "test_val", "=", "env", ".", "numpy_rand", ".", "rand", "(", "*", "dims", ")", "if", "len", "(", "dims", ")", ">", "0", ":", "test_val", "=", "test_val", ".", "astype", "(", "var", ".", "tensor", ".", "dtype", ")", "elif", "var", ".", "tensor", ".", "dtype", ".", "startswith", "(", "\"int\"", ")", ":", "test_val", "=", "1", "var", ".", "set_test_value", "(", "test_val", ")", "return", "var" ]
090fbad22a08a809b12951cd0d4984f5bd432698
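When test_shape is omitted, the fallback branch derives a small dummy shape from the tensor's rank and then pins the last axis to the declared dimension. That logic in isolation, with an assumed rank of 3 and last_dim of 128:

ndim, declared_dim = 3, 128                # assumed tensor rank and last_dim
dims = [(d + 1) * 3 for d in range(ndim)]  # [3, 6, 9]: small per-axis sizes
if declared_dim != 0:
    dims[-1] = declared_dim                # [3, 6, 128]: pin the last axis
print(dims)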
test
SequentialDataset._pad
Pad sequences to a given length on the left or right side.
deepy/dataset/sequence.py
def _pad(self, side, length):
    """
    Pad sequences to a given length on the left or right side.
    """
    if self._train_set:
        self._train_set = pad_dataset(self._train_set, side, length)
    if self._valid_set:
        self._valid_set = pad_dataset(self._valid_set, side, length)
    if self._test_set:
        self._test_set = pad_dataset(self._test_set, side, length)
[ "Pad", "sequences", "to", "given", "length", "in", "the", "left", "or", "right", "side", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/dataset/sequence.py#L15-L24
[ "def", "_pad", "(", "self", ",", "side", ",", "length", ")", ":", "if", "self", ".", "_train_set", ":", "self", ".", "_train_set", "=", "pad_dataset", "(", "self", ".", "_train_set", ",", "side", ",", "length", ")", "if", "self", ".", "_valid_set", ":", "self", ".", "_valid_set", "=", "pad_dataset", "(", "self", ".", "_valid_set", ",", "side", ",", "length", ")", "if", "self", ".", "_test_set", ":", "self", ".", "_test_set", "=", "pad_dataset", "(", "self", ".", "_test_set", ",", "side", ",", "length", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
rmsprop_core
RMSPROP optimization core.
deepy/trainers/cores/rmsprop.py
def rmsprop_core(params, gradients, momentum=0.9, learning_rate=0.01):
    """
    RMSPROP optimization core.
    """
    for param, grad in zip(params, gradients):
        rms_ = theano.shared(np.zeros_like(param.get_value()), name=param.name + '_rms')
        rms = momentum * rms_ + (1 - momentum) * grad * grad
        yield rms_, rms
        yield param, param - learning_rate * grad / T.sqrt(rms + 1e-8)
[ "RMSPROP", "optimization", "core", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/cores/rmsprop.py#L8-L16
[ "def", "rmsprop_core", "(", "params", ",", "gradients", ",", "momentum", "=", "0.9", ",", "learning_rate", "=", "0.01", ")", ":", "for", "param", ",", "grad", "in", "zip", "(", "params", ",", "gradients", ")", ":", "rms_", "=", "theano", ".", "shared", "(", "np", ".", "zeros_like", "(", "param", ".", "get_value", "(", ")", ")", ",", "name", "=", "param", ".", "name", "+", "'_rms'", ")", "rms", "=", "momentum", "*", "rms_", "+", "(", "1", "-", "momentum", ")", "*", "grad", "*", "grad", "yield", "rms_", ",", "rms", "yield", "param", ",", "param", "-", "learning_rate", "*", "grad", "/", "T", ".", "sqrt", "(", "rms", "+", "1e-8", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
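The update rule above keeps an exponential moving average of squared gradients and scales the step by its square root. A self-contained NumPy version of one RMSProp step, with rates and epsilon mirroring the defaults above and toy values for the rest:

import numpy as np

def rmsprop_step(param, grad, rms, momentum=0.9, learning_rate=0.01, eps=1e-8):
    # Exponential moving average of the squared gradient
    rms = momentum * rms + (1 - momentum) * grad * grad
    # Scale the step by the root of that average
    param = param - learning_rate * grad / np.sqrt(rms + eps)
    return param, rms

p, g, r = np.ones(3), np.array([0.5, -1.0, 2.0]), np.zeros(3)
p, r = rmsprop_step(p, g, r)
print(p, r)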
test
pad_dataset
Pad data set to specified length. Parameters: length - max length; pad to the max length in the batch if length is -1
deepy/dataset/padding.py
def pad_dataset(subset, side="right", length=-1):
    """
    Pad data set to specified length.
    Parameters:
        length - max length; pad to the max length in the batch if length is -1
    """
    assert length == -1 or length > 0
    if type(subset[0][0][0]) in [float, int, np.int64, np.int32, np.float32]:
        return _pad_2d(subset, side, length)
    else:
        return _pad_3d(subset, side, length)
[ "Pad", "data", "set", "to", "specified", "length", ".", "Parameters", ":", "length", "-", "max", "length", "a", "just", "to", "the", "max", "length", "in", "the", "batch", "if", "length", "is", "-", "1" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/dataset/padding.py#L7-L17
[ "def", "pad_dataset", "(", "subset", ",", "side", "=", "\"right\"", ",", "length", "=", "-", "1", ")", ":", "assert", "length", "==", "-", "1", "or", "length", ">", "0", "if", "type", "(", "subset", "[", "0", "]", "[", "0", "]", "[", "0", "]", ")", "in", "[", "float", ",", "int", ",", "np", ".", "int64", ",", "np", ".", "int32", ",", "np", ".", "float32", "]", ":", "return", "_pad_2d", "(", "subset", ",", "side", ",", "length", ")", "else", ":", "return", "_pad_3d", "(", "subset", ",", "side", ",", "length", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
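Padding semantics, sketched for the 2-D case: with length=-1 every sequence is brought up to the batch maximum, on the chosen side. A minimal pure-Python illustration (the pad value of 0 and the function below are assumptions for the sketch; the real _pad_2d/_pad_3d may differ):

def pad_batch(batch, side="right", length=-1, pad=0):
    # Hypothetical stand-in for _pad_2d: length=-1 means the batch max.
    target = max(len(seq) for seq in batch) if length == -1 else length
    out = []
    for seq in batch:
        fill = [pad] * (target - len(seq))
        out.append(seq + fill if side == "right" else fill + seq)
    return out

print(pad_batch([[1, 2], [3, 4, 5]]))          # [[1, 2, 0], [3, 4, 5]]
print(pad_batch([[1, 2], [3, 4, 5]], "left"))  # [[0, 1, 2], [3, 4, 5]]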
test
ScheduledTrainingServer.prepare_epoch
Prepare for one epoch. Returns: bool: False if the training should stop.
deepy/multigpu/server.py
def prepare_epoch(self):
    """
    Prepare for one epoch.
    Returns:
        bool: False if the training should stop.
    """
    self.epoch += 1
    if self.epoch >= self.epoch_start_halving and ((self.epoch - self.epoch_start_halving) % self._halving_freq == 0):
        self._lr *= 0.5
    self._current_iter = 0
    self._iters_from_last_valid = 0
    self._train_costs = []
    self.prepared_worker_pool.clear()
    self.batch_pool = range(self.num_train_batches)
    self.rand.shuffle(self.batch_pool)
    if self.epoch > self.end_at:
        self.log("Training is done, wait for all workers to stop")
        return False
    else:
        self.log("start epoch {} with lr={}".format(self.epoch, self._lr))
        return True
[ "Prepare", "for", "one", "epoch", ".", "Returns", ":", "bool", ":", "False", "if", "to", "stop", "the", "training", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/multigpu/server.py#L71-L91
[ "def", "prepare_epoch", "(", "self", ")", ":", "self", ".", "epoch", "+=", "1", "if", "self", ".", "epoch", ">=", "self", ".", "epoch_start_halving", "and", "(", "(", "self", ".", "epoch", "-", "self", ".", "epoch_start_halving", ")", "%", "self", ".", "_halving_freq", "==", "0", ")", ":", "self", ".", "_lr", "*=", "0.5", "self", ".", "_current_iter", "=", "0", "self", ".", "_iters_from_last_valid", "=", "0", "self", ".", "_train_costs", "=", "[", "]", "self", ".", "prepared_worker_pool", ".", "clear", "(", ")", "self", ".", "batch_pool", "=", "range", "(", "self", ".", "num_train_batches", ")", "self", ".", "rand", ".", "shuffle", "(", "self", ".", "batch_pool", ")", "if", "self", ".", "epoch", ">", "self", ".", "end_at", ":", "self", ".", "log", "(", "\"Training is done, wait all workers to stop\"", ")", "return", "False", "else", ":", "self", ".", "log", "(", "\"start epoch {} with lr={}\"", ".", "format", "(", "self", ".", "epoch", ",", "self", ".", "_lr", ")", ")", "return", "True" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
ScheduledTrainingServer.handle_control
Handles a control_request received from a worker. Returns: string or dict: response 'stop' - the worker should quit 'wait' - wait for 1 second 'eval' - evaluate on valid and test set to start a new epoch 'sync_hyperparams' - set learning rate 'valid' - evaluate on valid and test set, then save the params 'train' - train next batches
deepy/multigpu/server.py
def handle_control(self, req, worker_id, req_info):
    """
    Handles a control_request received from a worker.
    Returns:
        string or dict: response
            'stop' - the worker should quit
            'wait' - wait for 1 second
            'eval' - evaluate on valid and test set to start a new epoch
            'sync_hyperparams' - set learning rate
            'valid' - evaluate on valid and test set, then save the params
            'train' - train next batches
    """
    if self.start_time is None:
        self.start_time = time.time()
    response = ""

    if req == 'next':
        if self.num_train_batches == 0:
            response = "get_num_batches"
        elif self._done:
            response = "stop"
            self.worker_is_done(worker_id)
        elif self._evaluating:
            response = 'wait'
        elif not self.batch_pool:
            # End of one iter
            if self._train_costs:
                with self._lock:
                    sys.stdout.write("\r")
                    sys.stdout.flush()
                    mean_costs = []
                    for i in range(len(self._training_names)):
                        mean_costs.append(np.mean([c[i] for c in self._train_costs]))
                    self.log("train (epoch={:2d}) {}".format(
                        self.epoch,
                        self.get_monitor_string(zip(self._training_names, mean_costs)))
                    )
            response = {'eval': None, 'best_valid_cost': self._best_valid_cost}
            self._evaluating = True
        else:
            # Continue training
            if worker_id not in self.prepared_worker_pool:
                response = {"sync_hyperparams": self.feed_hyperparams()}
                self.prepared_worker_pool.add(worker_id)
            elif self._iters_from_last_valid >= self._valid_freq:
                response = {'valid': None, 'best_valid_cost': self._best_valid_cost}
                self._iters_from_last_valid = 0
            else:
                response = {"train": self.feed_batches()}
    elif 'eval_done' in req:
        with self._lock:
            self._evaluating = False
            sys.stdout.write("\r")
            sys.stdout.flush()
            if 'test_costs' in req and req['test_costs']:
                self.log("test (epoch={:2d}) {} (worker {})".format(
                    self.epoch,
                    self.get_monitor_string(req['test_costs']),
                    worker_id)
                )
            if 'valid_costs' in req and req['valid_costs']:  # fixed: was req['test_costs']
                valid_J = req['valid_costs'][0][1]
                if valid_J < self._best_valid_cost:
                    self._best_valid_cost = valid_J
                    star_str = "*"
                else:
                    star_str = ""
                self.log("valid (epoch={:2d}) {} {} (worker {})".format(
                    self.epoch,
                    self.get_monitor_string(req['valid_costs']),
                    star_str,
                    worker_id))
                # if star_str and 'auto_save' in req and req['auto_save']:
                #     self.log("(worker {}) save the model to {}".format(
                #         worker_id,
                #         req['auto_save']
                #     ))
            continue_training = self.prepare_epoch()
            self._epoch_start_time = time.time()
            if not continue_training:
                self._done = True
                self.log("training time {:.4f}s".format(time.time() - self.start_time))
                response = "stop"
    elif 'valid_done' in req:
        with self._lock:
            sys.stdout.write("\r")
            sys.stdout.flush()
            if 'valid_costs' in req:
                valid_J = req['valid_costs'][0][1]
                if valid_J < self._best_valid_cost:
                    self._best_valid_cost = valid_J
                    star_str = "*"
                else:
                    star_str = ""
                self.log("valid ( dryrun ) {} {} (worker {})".format(
                    self.get_monitor_string(req['valid_costs']),
                    star_str,
                    worker_id
                ))
                # if star_str and 'auto_save' in req and req['auto_save']:
                #     self.log("(worker {}) save the model to {}".format(
                #         worker_id,
                #         req['auto_save']
                #     ))
    elif 'train_done' in req:
        costs = req['costs']
        self._train_costs.append(costs)
        sys.stdout.write("\x1b[2K\r> %d%% | J=%.2f | %.1f batch/s" % (
            self._current_iter * 100 / self.num_train_batches,
            costs[0],
            float(len(self._train_costs) * self.sync_freq) / (time.time() - self._epoch_start_time)))
        sys.stdout.flush()
    elif 'get_num_batches_done' in req:
        self.num_train_batches = req['get_num_batches_done']
    elif 'get_easgd_alpha' in req:
        response = self._easgd_alpha
    elif 'sync_hyperparams' in req:
        response = {"sync_hyperparams": self.feed_hyperparams()}
    elif 'init_schedule' in req:
        with self._lock:
            sys.stdout.write("\r")
            sys.stdout.flush()
            self.log("worker {} connected".format(worker_id))
            if self.epoch == 0:
                schedule_params = req['init_schedule']
                sch_str = " ".join("{}={}".format(a, b) for (a, b) in schedule_params.items())
                self.log("initialize the schedule with {}".format(sch_str))
                for key, val in schedule_params.items():
                    if not val:
                        continue
                    if key == 'learning_rate':
                        self._lr = val
                    elif key == 'start_halving_at':
                        self.epoch_start_halving = val
                    elif key == 'halving_freq':
                        self._halving_freq = val
                    elif key == 'end_at':
                        self.end_at = val
                    elif key == 'sync_freq':
                        self.sync_freq = val
                    elif key == 'valid_freq':
                        self._valid_freq = val
    elif 'set_names' in req:
        self._training_names = req['training_names']
        self._evaluation_names = req['evaluation_names']

    return response
[ "Handles", "a", "control_request", "received", "from", "a", "worker", ".", "Returns", ":", "string", "or", "dict", ":", "response" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/multigpu/server.py#L116-L262
[ "def", "handle_control", "(", "self", ",", "req", ",", "worker_id", ",", "req_info", ")", ":", "if", "self", ".", "start_time", "is", "None", ":", "self", ".", "start_time", "=", "time", ".", "time", "(", ")", "response", "=", "\"\"", "if", "req", "==", "'next'", ":", "if", "self", ".", "num_train_batches", "==", "0", ":", "response", "=", "\"get_num_batches\"", "elif", "self", ".", "_done", ":", "response", "=", "\"stop\"", "self", ".", "worker_is_done", "(", "worker_id", ")", "elif", "self", ".", "_evaluating", ":", "response", "=", "'wait'", "elif", "not", "self", ".", "batch_pool", ":", "# End of one iter", "if", "self", ".", "_train_costs", ":", "with", "self", ".", "_lock", ":", "sys", ".", "stdout", ".", "write", "(", "\"\\r\"", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "mean_costs", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "self", ".", "_training_names", ")", ")", ":", "mean_costs", ".", "append", "(", "np", ".", "mean", "(", "[", "c", "[", "i", "]", "for", "c", "in", "self", ".", "_train_costs", "]", ")", ")", "self", ".", "log", "(", "\"train (epoch={:2d}) {}\"", ".", "format", "(", "self", ".", "epoch", ",", "self", ".", "get_monitor_string", "(", "zip", "(", "self", ".", "_training_names", ",", "mean_costs", ")", ")", ")", ")", "response", "=", "{", "'eval'", ":", "None", ",", "'best_valid_cost'", ":", "self", ".", "_best_valid_cost", "}", "self", ".", "_evaluating", "=", "True", "else", ":", "# Continue training", "if", "worker_id", "not", "in", "self", ".", "prepared_worker_pool", ":", "response", "=", "{", "\"sync_hyperparams\"", ":", "self", ".", "feed_hyperparams", "(", ")", "}", "self", ".", "prepared_worker_pool", ".", "add", "(", "worker_id", ")", "elif", "self", ".", "_iters_from_last_valid", ">=", "self", ".", "_valid_freq", ":", "response", "=", "{", "'valid'", ":", "None", ",", "'best_valid_cost'", ":", "self", ".", "_best_valid_cost", "}", "self", ".", "_iters_from_last_valid", "=", "0", "else", ":", "response", "=", "{", "\"train\"", ":", "self", ".", "feed_batches", "(", ")", "}", "elif", "'eval_done'", "in", "req", ":", "with", "self", ".", "_lock", ":", "self", ".", "_evaluating", "=", "False", "sys", ".", "stdout", ".", "write", "(", "\"\\r\"", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "if", "'test_costs'", "in", "req", "and", "req", "[", "'test_costs'", "]", ":", "self", ".", "log", "(", "\"test (epoch={:2d}) {} (worker {})\"", ".", "format", "(", "self", ".", "epoch", ",", "self", ".", "get_monitor_string", "(", "req", "[", "'test_costs'", "]", ")", ",", "worker_id", ")", ")", "if", "'valid_costs'", "in", "req", "and", "req", "[", "'test_costs'", "]", ":", "valid_J", "=", "req", "[", "'valid_costs'", "]", "[", "0", "]", "[", "1", "]", "if", "valid_J", "<", "self", ".", "_best_valid_cost", ":", "self", ".", "_best_valid_cost", "=", "valid_J", "star_str", "=", "\"*\"", "else", ":", "star_str", "=", "\"\"", "self", ".", "log", "(", "\"valid (epoch={:2d}) {} {} (worker {})\"", ".", "format", "(", "self", ".", "epoch", ",", "self", ".", "get_monitor_string", "(", "req", "[", "'valid_costs'", "]", ")", ",", "star_str", ",", "worker_id", ")", ")", "# if star_str and 'auto_save' in req and req['auto_save']:", "# self.log(\"(worker {}) save the model to {}\".format(", "# worker_id,", "# req['auto_save']", "# ))", "continue_training", "=", "self", ".", "prepare_epoch", "(", ")", "self", ".", "_epoch_start_time", "=", "time", ".", "time", "(", ")", "if", "not", "continue_training", ":", "self", ".", "_done", "=", "True", "self", 
".", "log", "(", "\"training time {:.4f}s\"", ".", "format", "(", "time", ".", "time", "(", ")", "-", "self", ".", "start_time", ")", ")", "response", "=", "\"stop\"", "elif", "'valid_done'", "in", "req", ":", "with", "self", ".", "_lock", ":", "sys", ".", "stdout", ".", "write", "(", "\"\\r\"", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "if", "'valid_costs'", "in", "req", ":", "valid_J", "=", "req", "[", "'valid_costs'", "]", "[", "0", "]", "[", "1", "]", "if", "valid_J", "<", "self", ".", "_best_valid_cost", ":", "self", ".", "_best_valid_cost", "=", "valid_J", "star_str", "=", "\"*\"", "else", ":", "star_str", "=", "\"\"", "self", ".", "log", "(", "\"valid ( dryrun ) {} {} (worker {})\"", ".", "format", "(", "self", ".", "get_monitor_string", "(", "req", "[", "'valid_costs'", "]", ")", ",", "star_str", ",", "worker_id", ")", ")", "# if star_str and 'auto_save' in req and req['auto_save']:", "# self.log(\"(worker {}) save the model to {}\".format(", "# worker_id,", "# req['auto_save']", "# ))", "elif", "'train_done'", "in", "req", ":", "costs", "=", "req", "[", "'costs'", "]", "self", ".", "_train_costs", ".", "append", "(", "costs", ")", "sys", ".", "stdout", ".", "write", "(", "\"\\x1b[2K\\r> %d%% | J=%.2f | %.1f batch/s\"", "%", "(", "self", ".", "_current_iter", "*", "100", "/", "self", ".", "num_train_batches", ",", "costs", "[", "0", "]", ",", "float", "(", "len", "(", "self", ".", "_train_costs", ")", "*", "self", ".", "sync_freq", ")", "/", "(", "time", ".", "time", "(", ")", "-", "self", ".", "_epoch_start_time", ")", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "elif", "'get_num_batches_done'", "in", "req", ":", "self", ".", "num_train_batches", "=", "req", "[", "'get_num_batches_done'", "]", "elif", "'get_easgd_alpha'", "in", "req", ":", "response", "=", "self", ".", "_easgd_alpha", "elif", "'sync_hyperparams'", "in", "req", ":", "response", "=", "{", "\"sync_hyperparams\"", ":", "self", ".", "feed_hyperparams", "(", ")", "}", "elif", "'init_schedule'", "in", "req", ":", "with", "self", ".", "_lock", ":", "sys", ".", "stdout", ".", "write", "(", "\"\\r\"", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "self", ".", "log", "(", "\"worker {} connected\"", ".", "format", "(", "worker_id", ")", ")", "if", "self", ".", "epoch", "==", "0", ":", "schedule_params", "=", "req", "[", "'init_schedule'", "]", "sch_str", "=", "\" \"", ".", "join", "(", "\"{}={}\"", ".", "format", "(", "a", ",", "b", ")", "for", "(", "a", ",", "b", ")", "in", "schedule_params", ".", "items", "(", ")", ")", "self", ".", "log", "(", "\"initialize the schedule with {}\"", ".", "format", "(", "sch_str", ")", ")", "for", "key", ",", "val", "in", "schedule_params", ".", "items", "(", ")", ":", "if", "not", "val", ":", "continue", "if", "key", "==", "'learning_rate'", ":", "self", ".", "_lr", "=", "val", "elif", "key", "==", "'start_halving_at'", ":", "self", ".", "epoch_start_halving", "=", "val", "elif", "key", "==", "'halving_freq'", ":", "self", ".", "_halving_freq", "=", "val", "elif", "key", "==", "'end_at'", ":", "self", ".", "end_at", "=", "val", "elif", "key", "==", "'sync_freq'", ":", "self", ".", "sync_freq", "=", "val", "elif", "key", "==", "'valid_freq'", ":", "self", ".", "_valid_freq", "=", "val", "elif", "'set_names'", "in", "req", ":", "self", ".", "_training_names", "=", "req", "[", "'training_names'", "]", "self", ".", "_evaluation_names", "=", "req", "[", "'evaluation_names'", "]", "return", "response" ]
090fbad22a08a809b12951cd0d4984f5bd432698
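handle_control is a single dispatcher keyed on the request's type; the same shape can be expressed as a table of handlers, a common refactoring for protocols like this. A sketch under that assumption, not the repo's code:

def make_dispatcher(handlers, default=""):
    # `handlers` maps a request key (dict key or plain string) to a function.
    def handle(req):
        if isinstance(req, dict):
            for key, fn in handlers.items():
                if key in req:
                    return fn(req)
            return default
        fn = handlers.get(req)
        return fn(req) if fn else default
    return handle

handle = make_dispatcher({
    'train_done': lambda r: "logged costs {}".format(r['costs']),
    'get_easgd_alpha': lambda r: 0.5,
})
print(handle({'train_done': None, 'costs': [1.0]}))  # logged costs [1.0]
print(handle('unknown'))                             # "" (the default)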
test
Timer.report
Report elapsed time.
deepy/utils/timer.py
def report(self):
    """
    Report elapsed time.
    """
    if not self.end_time:
        self.end()
    print("Time: {} mins".format((self.end_time - self.start_time) / 60))
[ "Report", "elapsed", "time", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/utils/timer.py#L21-L27
[ "def", "report", "(", "self", ")", ":", "if", "not", "self", ".", "end_time", ":", "self", ".", "end", "(", ")", "print", "(", "\"Time: {} mins\"", ".", "format", "(", "(", "self", ".", "end_time", "-", "self", ".", "start_time", ")", "/", "60", ")", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
TrainingValidator.compare
Compare to previous records and return whether the given cost is a new best. :return: True if the given cost is a new best
deepy/trainers/controllers.py
def compare(self, cost_map):
    """
    Compare to previous records and return whether the given cost is a new best.
    :return: True if the given cost is a new best
    """
    cri_val = cost_map[self._criteria]
    if self._best_criteria is None:
        self._best_criteria = cri_val
        return True
    else:
        if self._smaller_is_better and cri_val < self._best_criteria:
            self._best_criteria = cri_val
            return True
        elif not self._smaller_is_better and cri_val > self._best_criteria:
            self._best_criteria = cri_val
            return True
        else:
            return False
[ "Compare", "to", "previous", "records", "and", "return", "whether", "the", "given", "cost", "is", "a", "new", "best", ".", ":", "return", ":", "True", "if", "the", "given", "cost", "is", "a", "new", "best" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/controllers.py#L42-L59
[ "def", "compare", "(", "self", ",", "cost_map", ")", ":", "cri_val", "=", "cost_map", "[", "self", ".", "_criteria", "]", "if", "self", ".", "_best_criteria", "is", "None", ":", "self", ".", "_best_criteria", "=", "cri_val", "return", "True", "else", ":", "if", "self", ".", "_smaller_is_better", "and", "cri_val", "<", "self", ".", "_best_criteria", ":", "self", ".", "_best_criteria", "=", "cri_val", "return", "True", "elif", "not", "self", ".", "_smaller_is_better", "and", "cri_val", ">", "self", ".", "_best_criteria", ":", "self", ".", "_best_criteria", "=", "cri_val", "return", "True", "else", ":", "return", "False" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
TrainingValidator.run
Run the model with validation data and return costs.
deepy/trainers/controllers.py
def run(self, data_x):
    """
    Run the model with validation data and return costs.
    """
    output_vars = self.compute(*data_x)
    return self._extract_costs(output_vars)
[ "Run", "the", "model", "with", "validation", "data", "and", "return", "costs", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/controllers.py#L79-L84
[ "def", "run", "(", "self", ",", "data_x", ")", ":", "output_vars", "=", "self", ".", "compute", "(", "*", "data_x", ")", "return", "self", ".", "_extract_costs", "(", "output_vars", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
TrainingValidator.invoke
This function will be called after each iteration.
deepy/trainers/controllers.py
def invoke(self):
    """
    This function will be called after each iteration.
    """
    self._counter += 1
    if self._counter % self._freq == 0:
        cnt = 0.
        sum_map = defaultdict(float)
        for x in self._trainer.get_data(self._data_split):
            val_map = self.run(x)
            if not isinstance(val_map, dict):
                raise Exception("Monitor.run must return a dict.")
            for k, val in val_map.items():
                sum_map[k] += val
            cnt += 1
        for k in sum_map:
            sum_map[k] /= cnt
        new_best = self.compare(sum_map)
        self._trainer.report(sum_map, self._data_split, new_best=new_best)
        if new_best:
            self._trainer.save_checkpoint(self._save_path)
[ "This", "function", "will", "be", "called", "after", "each", "iteration", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/controllers.py#L86-L106
[ "def", "invoke", "(", "self", ")", ":", "self", ".", "_counter", "+=", "1", "if", "self", ".", "_counter", "%", "self", ".", "_freq", "==", "0", ":", "cnt", "=", "0.", "sum_map", "=", "defaultdict", "(", "float", ")", "for", "x", "in", "self", ".", "_trainer", ".", "get_data", "(", "self", ".", "_data_split", ")", ":", "val_map", "=", "self", ".", "run", "(", "x", ")", "if", "not", "isinstance", "(", "val_map", ",", "dict", ")", ":", "raise", "Exception", "(", "\"Monitor.run must return a dict.\"", ")", "for", "k", ",", "val", "in", "val_map", ".", "items", "(", ")", ":", "sum_map", "[", "k", "]", "+=", "val", "cnt", "+=", "1", "for", "k", "in", "sum_map", ":", "sum_map", "[", "k", "]", "/=", "cnt", "new_best", "=", "self", ".", "compare", "(", "sum_map", ")", "self", ".", "_trainer", ".", "report", "(", "sum_map", ",", "self", ".", "_data_split", ",", "new_best", "=", "new_best", ")", "if", "new_best", ":", "self", ".", "_trainer", ".", "save_checkpoint", "(", "self", ".", "_save_path", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
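The validation pass accumulates each named cost over all batches and divides by the batch count. That accumulation in isolation, with assumed per-batch cost dicts:

from collections import defaultdict

batch_costs = [{"J": 1.0, "acc": 0.8}, {"J": 3.0, "acc": 0.6}]  # assumed run() outputs
sum_map, cnt = defaultdict(float), 0.0
for val_map in batch_costs:
    for k, val in val_map.items():
        sum_map[k] += val
    cnt += 1
means = {k: v / cnt for k, v in sum_map.items()}
print(means)  # {'J': 2.0, 'acc': ~0.7}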
test
Loop._build_loop_vars
Create inner loop variables.
deepy/core/loop.py
def _build_loop_vars(self):
    """
    Create inner loop variables.
    """
    from theano.tensor.var import TensorVariable
    from deepy.core.neural_var import NeuralVariable
    if not self._loop_vars:
        self._ordered_out_keys = self._outputs.keys()
        seq_keys = self._sequences.keys()
        filled_out_keys = [k for k in self._ordered_out_keys if self._outputs[k]]
        nonseq_keys = self._non_sequences.keys()
        dummy_tensors, self._scan_local_vars = get_dummy_args(
            sequences=[self._sequences[k].tensor for k in seq_keys],
            outputs_info=[self._outputs[k].tensor for k in self._ordered_out_keys],
            non_sequences=[self._non_sequences[k].tensor for k in nonseq_keys],
            **self._kwargs
        )
        dummy_map = dict(zip(seq_keys + filled_out_keys + nonseq_keys, dummy_tensors))
        arg_map = self._sequences.copy()
        arg_map.update(self._outputs)
        arg_map.update(self._non_sequences)
        self._loop_vars = LoopVars()
        for k, dummy_tensor in dummy_map.items():
            dummy_var = NeuralVariable(dummy_tensor, dim=arg_map[k].dim())
            self._loop_vars[k] = dummy_var
[ "Create", "inner", "loop", "variables", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/loop.py#L32-L56
[ "def", "_build_loop_vars", "(", "self", ")", ":", "from", "theano", ".", "tensor", ".", "var", "import", "TensorVariable", "from", "deepy", ".", "core", ".", "neural_var", "import", "NeuralVariable", "if", "not", "self", ".", "_loop_vars", ":", "self", ".", "_ordered_out_keys", "=", "self", ".", "_outputs", ".", "keys", "(", ")", "seq_keys", "=", "self", ".", "_sequences", ".", "keys", "(", ")", "filled_out_keys", "=", "[", "k", "for", "k", "in", "self", ".", "_ordered_out_keys", "if", "self", ".", "_outputs", "[", "k", "]", "]", "nonseq_keys", "=", "self", ".", "_non_sequences", ".", "keys", "(", ")", "dummy_tensors", ",", "self", ".", "_scan_local_vars", "=", "get_dummy_args", "(", "sequences", "=", "[", "self", ".", "_sequences", "[", "k", "]", ".", "tensor", "for", "k", "in", "seq_keys", "]", ",", "outputs_info", "=", "[", "self", ".", "_outputs", "[", "k", "]", ".", "tensor", "for", "k", "in", "self", ".", "_ordered_out_keys", "]", ",", "non_sequences", "=", "[", "self", ".", "_non_sequences", "[", "k", "]", ".", "tensor", "for", "k", "in", "nonseq_keys", "]", ",", "*", "*", "self", ".", "_kwargs", ")", "dummy_map", "=", "dict", "(", "zip", "(", "seq_keys", "+", "filled_out_keys", "+", "nonseq_keys", ",", "dummy_tensors", ")", ")", "arg_map", "=", "self", ".", "_sequences", ".", "copy", "(", ")", "arg_map", ".", "update", "(", "self", ".", "_outputs", ")", "arg_map", ".", "update", "(", "self", ".", "_non_sequences", ")", "self", ".", "_loop_vars", "=", "LoopVars", "(", ")", "for", "k", ",", "dummy_tensor", "in", "dummy_map", ".", "items", "(", ")", ":", "dummy_var", "=", "NeuralVariable", "(", "dummy_tensor", ",", "dim", "=", "arg_map", "[", "k", "]", ".", "dim", "(", ")", ")", "self", ".", "_loop_vars", "[", "k", "]", "=", "dummy_var" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
Loop._scan_step
Internal scan with dummy input variables.
deepy/core/loop.py
def _scan_step(self, vars):
    """
    Internal scan with dummy input variables.
    """
    from neural_var import NeuralVariable
    if not self._loop_vars:
        raise Exception("The loop is not initialized. To initialize the loop, use `with loop as vars`")
    replace_map = {}
    for k, var in vars.items():
        if var is not None:
            replace_map[self._dummy_nodes[k].tensor] = var.tensor
    outputs = {}
    for k in self._outputs:
        if k not in self._loop_vars:
            raise Exception("{} can not be found in loop vars.".format(k))
        output_node = theano.clone(self._loop_vars[k].tensor, replace_map)
        outputs[k] = NeuralVariable(output_node, self._loop_vars[k].dim())
    return outputs
[ "Internal", "scan", "with", "dummy", "input", "variables", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/loop.py#L86-L103
[ "def", "_scan_step", "(", "self", ",", "vars", ")", ":", "from", "neural_var", "import", "NeuralVariable", "if", "not", "self", ".", "_loop_vars", ":", "raise", "Exception", "(", "\"The loop is not initialized. To initialize the loop, use `with loop as vars`\"", ")", "replace_map", "=", "{", "}", "for", "k", ",", "var", "in", "vars", ".", "items", "(", ")", ":", "if", "var", "is", "not", "None", ":", "replace_map", "[", "self", ".", "_dummy_nodes", "[", "k", "]", ".", "tensor", "]", "=", "var", ".", "tensor", "outputs", "=", "{", "}", "for", "k", "in", "self", ".", "_outputs", ":", "if", "k", "not", "in", "self", ".", "_loop_vars", ":", "raise", "Exception", "(", "\"{} can not be found in loop vars.\"", ".", "format", "(", "k", ")", ")", "output_node", "=", "theano", ".", "clone", "(", "self", ".", "_loop_vars", "[", "k", "]", ".", "tensor", ",", "replace_map", ")", "outputs", "[", "k", "]", "=", "NeuralVariable", "(", "output_node", ",", "self", ".", "_loop_vars", "[", "k", "]", ".", "dim", "(", ")", ")", "return", "outputs" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
Loop.get_outputs
Get the outputs of the loop. Return specific variables by passing the keys to the arguments. :rtype: MapDict
deepy/core/loop.py
def get_outputs(self, *args):
    """
    Get the outputs of the loop.
    Return specific variables by passing the keys to the arguments.
    :rtype: MapDict
    """
    if args:
        output_vars = map(self._scan_outputs.get, args)
        if len(output_vars) == 1:
            return output_vars[0]
        else:
            return output_vars
    else:
        return self._scan_outputs
[ "Get", "the", "outputs", "of", "the", "loop", ".", "Return", "specific", "variables", "by", "passing", "the", "keys", "to", "the", "arguments", ".", ":", "rtype", ":", "MapDict" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/loop.py#L116-L129
[ "def", "get_outputs", "(", "self", ",", "*", "args", ")", ":", "if", "args", ":", "output_vars", "=", "map", "(", "self", ".", "_scan_outputs", ".", "get", ",", "args", ")", "if", "len", "(", "output_vars", ")", "==", "1", ":", "return", "output_vars", "[", "0", "]", "else", ":", "return", "output_vars", "else", ":", "return", "self", ".", "_scan_outputs" ]
090fbad22a08a809b12951cd0d4984f5bd432698
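For illustration, a minimal self-contained sketch of the accessor contract above: a single key returns that variable directly, several keys return a list. `FakeLoop` and its dict stand-in for `_scan_outputs` are hypothetical; the real class is `deepy.core.loop.Loop`.

class FakeLoop(object):
    # Hypothetical stand-in: the real Loop fills _scan_outputs after scanning.
    def __init__(self, outputs):
        self._scan_outputs = outputs

    def get_outputs(self, *args):
        if args:
            output_vars = [self._scan_outputs.get(k) for k in args]
            return output_vars[0] if len(output_vars) == 1 else output_vars
        return self._scan_outputs

loop = FakeLoop({"o": 1, "state": 2})
print(loop.get_outputs("o"))           # -> 1
print(loop.get_outputs("o", "state"))  # -> [1, 2]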
test
momentum_core
Momentum SGD optimization core.
deepy/trainers/cores/momentum.py
def momentum_core(params, gradients, momentum=0.9, learning_rate=0.01):
    """
    Momentum SGD optimization core.
    """
    free_parameters = []
    updates = []
    for param, grad in zip(params, gradients):
        delta = learning_rate * grad
        velocity = theano.shared(np.zeros_like(param.get_value()), name=param.name + '_vel')
        updates.append((velocity, momentum * velocity - delta))
        updates.append((param, param + velocity))
        free_parameters.append(velocity)
    return updates, free_parameters
[ "Momentum", "SGD", "optimization", "core", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/cores/momentum.py#L7-L19
[ "def", "momentum_core", "(", "params", ",", "gradients", ",", "momentum", "=", "0.9", ",", "learning_rate", "=", "0.01", ")", ":", "free_parameters", "=", "[", "]", "updates", "=", "[", "]", "for", "param", ",", "grad", "in", "zip", "(", "params", ",", "gradients", ")", ":", "delta", "=", "learning_rate", "*", "grad", "velocity", "=", "theano", ".", "shared", "(", "np", ".", "zeros_like", "(", "param", ".", "get_value", "(", ")", ")", ",", "name", "=", "param", ".", "name", "+", "'_vel'", ")", "updates", ".", "append", "(", "(", "velocity", ",", "momentum", "*", "velocity", "-", "delta", ")", ")", "updates", ".", "append", "(", "(", "param", ",", "param", "+", "velocity", ")", ")", "free_parameters", ".", "append", "(", "velocity", ")", "return", "updates", ",", "free_parameters" ]
090fbad22a08a809b12951cd0d4984f5bd432698
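As a plain-NumPy sketch of the update pair built above (v <- momentum * v - lr * grad, then param <- param + v), not the Theano graph itself:

import numpy as np

def momentum_step(param, grad, velocity, momentum=0.9, learning_rate=0.01):
    # Mirrors momentum_core's two updates on concrete arrays.
    velocity = momentum * velocity - learning_rate * grad
    return param + velocity, velocity

param = np.array([1.0, -2.0])
velocity = np.zeros_like(param)
for _ in range(3):
    grad = 2 * param  # gradient of f(x) = sum(x^2)
    param, velocity = momentum_step(param, grad, velocity)
print(param)  # both components move toward the minimum at 0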
test
Runtime.iftrain
Execute `then_branch` when training.
deepy/core/runtime.py
def iftrain(self, then_branch, else_branch):
    """
    Execute `then_branch` when training.
    """
    return ifelse(self._training_flag, then_branch, else_branch, name="iftrain")
[ "Execute", "then_branch", "when", "training", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/runtime.py#L20-L24
[ "def", "iftrain", "(", "self", ",", "then_branch", ",", "else_branch", ")", ":", "return", "ifelse", "(", "self", ".", "_training_flag", ",", "then_branch", ",", "else_branch", ",", "name", "=", "\"iftrain\"", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
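A minimal sketch of the shared-flag branching that `iftrain` relies on, assuming a working Theano install; the variable names here are illustrative, not deepy's:

import numpy as np
import theano
import theano.tensor as T
from theano.ifelse import ifelse

training_flag = theano.shared(np.int8(1), name="training_flag")
x = T.vector("x")
# e.g. dropout-style scaling applied only while training
out = ifelse(training_flag, x * 0.5, x)
f = theano.function([x], out)

print(f([2.0, 4.0]))         # training mode: [1.0, 2.0]
training_flag.set_value(0)
print(f([2.0, 4.0]))         # inference mode: [2.0, 4.0]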
test
Runtime.switch_training
Switch training mode. :param flag: switch on training mode when flag is True.
deepy/core/runtime.py
def switch_training(self, flag):
    """
    Switch training mode.
    :param flag: switch on training mode when flag is True.
    """
    if self._is_training == flag: return
    self._is_training = flag
    if flag:
        self._training_flag.set_value(1)
    else:
        self._training_flag.set_value(0)
[ "Switch", "training", "mode", ".", ":", "param", "flag", ":", "switch", "on", "training", "mode", "when", "flag", "is", "True", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/runtime.py#L26-L36
[ "def", "switch_training", "(", "self", ",", "flag", ")", ":", "if", "self", ".", "_is_training", "==", "flag", ":", "return", "self", ".", "_is_training", "=", "flag", "if", "flag", ":", "self", ".", "_training_flag", ".", "set_value", "(", "1", ")", "else", ":", "self", ".", "_training_flag", ".", "set_value", "(", "0", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
nag_core
Nesterov's Accelerated Gradient (NAG). See http://www.cs.toronto.edu/~fritz/absps/momentum.pdf . Still unfinished
deepy/trainers/cores/nag.py
def nag_core(params, J, momentum=0.9, learning_rate=0.01):
    """
    Nesterov's Accelerated Gradient (NAG).
    See http://www.cs.toronto.edu/~fritz/absps/momentum.pdf .
    Still unfinished.
    """
    # TODO: this requires some refactorings.
    for param in params:
        step = theano.shared(np.zeros_like(param.get_value()), name=param.name + '_step')
        velocity = theano.shared(np.zeros_like(param.get_value()), name=param.name + '_vel')
        yield step, momentum * velocity
        yield param, param + step
        yield velocity, step - learning_rate * T.grad(J, param)
        yield param, param + velocity - step
[ "Nesterov", "s", "Accelerated", "Gradient", "(", "NAG", ")", ".", "See", "http", ":", "//", "www", ".", "cs", ".", "toronto", ".", "edu", "/", "~fritz", "/", "absps", "/", "momentum", ".", "pdf", ".", "Still", "unfinished" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/cores/nag.py#L8-L21
[ "def", "nag_core", "(", "params", ",", "J", ",", "momentum", "=", "0.9", ",", "learning_rate", "=", "0.01", ")", ":", "# TODO: this requires some refractorings.", "for", "param", "in", "params", ":", "step", "=", "theano", ".", "shared", "(", "np", ".", "zeros_like", "(", "param", ".", "get_value", "(", ")", ")", ",", "name", "=", "param", ".", "name", "+", "'_step'", ")", "velocity", "=", "theano", ".", "shared", "(", "np", ".", "zeros_like", "(", "param", ".", "get_value", "(", ")", ")", ",", "name", "=", "param", ".", "name", "+", "'_vel'", ")", "yield", "step", ",", "momentum", "*", "velocity", "yield", "param", ",", "param", "+", "step", "yield", "velocity", ",", "step", "-", "learning_rate", "*", "T", ".", "grad", "(", "J", ",", "param", ")", "yield", "param", ",", "param", "+", "velocity", "-", "step" ]
090fbad22a08a809b12951cd0d4984f5bd432698
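Since the core above is explicitly unfinished, here is a standard NumPy formulation of Nesterov momentum for reference (a sketch, not deepy code): the gradient is evaluated at the look-ahead point param + momentum * velocity.

import numpy as np

def nag_step(param, grad_fn, velocity, momentum=0.9, learning_rate=0.01):
    # Evaluate the gradient at the look-ahead point, then step.
    lookahead = param + momentum * velocity
    velocity = momentum * velocity - learning_rate * grad_fn(lookahead)
    return param + velocity, velocity

param, velocity = np.array([3.0]), np.zeros(1)
for _ in range(5):
    param, velocity = nag_step(param, lambda p: 2 * p, velocity)
print(param)  # converges toward the minimum of f(x) = x^2 at 0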
test
NeuralTrainer.skip
Skip N batches in the training.
deepy/trainers/base.py
def skip(self, n_batches, n_epochs=0):
    """
    Skip N batches in the training.
    """
    logging.info("skip %d epochs and %d batches" % (n_epochs, n_batches))
    self._skip_batches = n_batches
    self._skip_epochs = n_epochs
[ "Skip", "N", "batches", "in", "the", "training", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/base.py#L90-L96
[ "def", "skip", "(", "self", ",", "n_batches", ",", "n_epochs", "=", "0", ")", ":", "logging", ".", "info", "(", "\"skip %d epochs and %d batches\"", "%", "(", "n_epochs", ",", "n_batches", ")", ")", "self", ".", "_skip_batches", "=", "n_batches", "self", ".", "_skip_epochs", "=", "n_epochs" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralTrainer.load_params
Load parameters for the training. This method can load free parameters and resume the training progress.
deepy/trainers/base.py
def load_params(self, path, exclude_free_params=False):
    """
    Load parameters for the training.
    This method can load free parameters and resume the training progress.
    """
    self.network.load_params(path, exclude_free_params=exclude_free_params)
    self.best_params = self.copy_params()
    # Resume the progress
    if self.network.train_logger.progress() > 0 or self.network.train_logger.epoch() > 0:
        self.skip(self.network.train_logger.progress(), self.network.train_logger.epoch() - 1)
[ "Load", "parameters", "for", "the", "training", ".", "This", "method", "can", "load", "free", "parameters", "and", "resume", "the", "training", "progress", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/base.py#L144-L153
[ "def", "load_params", "(", "self", ",", "path", ",", "exclude_free_params", "=", "False", ")", ":", "self", ".", "network", ".", "load_params", "(", "path", ",", "exclude_free_params", "=", "exclude_free_params", ")", "self", ".", "best_params", "=", "self", ".", "copy_params", "(", ")", "# Resume the progress", "if", "self", ".", "network", ".", "train_logger", ".", "progress", "(", ")", ">", "0", "or", "self", ".", "network", ".", "train_logger", ".", "epoch", "(", ")", ">", "0", ":", "self", ".", "skip", "(", "self", ".", "network", ".", "train_logger", ".", "progress", "(", ")", ",", "self", ".", "network", ".", "train_logger", ".", "epoch", "(", ")", "-", "1", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralTrainer.add_iter_controllers
Add iteration callback functions (each receives the trainer as its argument). :param controllers: can be a `TrainingController` or a function. :type controllers: list of TrainingController
deepy/trainers/base.py
def add_iter_controllers(self, *controllers):
    """
    Add iteration callback functions (each receives the trainer as its argument).
    :param controllers: can be a `TrainingController` or a function.
    :type controllers: list of TrainingController
    """
    for controller in controllers:
        if isinstance(controller, TrainingController):
            controller.bind(self)
        self._iter_controllers.append(controller)
[ "Add", "iteration", "callbacks", "function", "(", "receives", "an", "argument", "of", "the", "trainer", ")", ".", ":", "param", "controllers", ":", "can", "be", "a", "TrainingController", "or", "a", "function", ".", ":", "type", "funcs", ":", "list", "of", "TrainingContoller" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/base.py#L166-L175
[ "def", "add_iter_controllers", "(", "self", ",", "*", "controllers", ")", ":", "for", "controller", "in", "controllers", ":", "if", "isinstance", "(", "controller", ",", "TrainingController", ")", ":", "controller", ".", "bind", "(", "self", ")", "self", ".", "_iter_controllers", ".", "append", "(", "controller", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralTrainer.add_epoch_controllers
Add epoch callback functions. :param controllers: can be a `TrainingController` or a function.
deepy/trainers/base.py
def add_epoch_controllers(self, *controllers):
    """
    Add epoch callback functions.
    :param controllers: can be a `TrainingController` or a function.
    """
    for controller in controllers:
        if isinstance(controller, TrainingController):
            controller.bind(self)
        self._epoch_controllers.append(controller)
[ "Add", "epoch", "callbacks", "function", ".", ":", "param", "controllers", ":", "can", "be", "a", "TrainingController", "or", "a", "function", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/base.py#L177-L185
[ "def", "add_epoch_controllers", "(", "self", ",", "*", "controllers", ")", ":", "for", "controller", "in", "controllers", ":", "if", "isinstance", "(", "controller", ",", "TrainingController", ")", ":", "controller", ".", "bind", "(", "self", ")", "self", ".", "_epoch_controllers", ".", "append", "(", "controller", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralTrainer.train
Train the model and return costs.
deepy/trainers/base.py
def train(self, train_set, valid_set=None, test_set=None, train_size=None):
    """
    Train the model and return costs.
    """
    self._epoch = 0
    while True:
        if self._skip_epochs > 0:
            logging.info("skipping one epoch ...")
            self._skip_epochs -= 1
            self._epoch += 1
            yield None
            continue
        # Test
        if not self._epoch % self.config.test_frequency and test_set:
            try:
                self._run_test(self._epoch, test_set)
            except KeyboardInterrupt:
                logging.info('interrupted!')
                break
        # Validate
        if not self._epoch % self.validation_frequency and valid_set:
            try:
                if not self._run_valid(self._epoch, valid_set):
                    logging.info('patience elapsed, bailing out')
                    break
            except KeyboardInterrupt:
                logging.info('interrupted!')
                break
        # Train one step
        try:
            costs = self._run_train(self._epoch, train_set, train_size)
        except KeyboardInterrupt:
            logging.info('interrupted!')
            break
        # Check costs
        if np.isnan(costs[0][1]):
            logging.info("NaN detected in costs, rollback to last parameters")
            self.set_params(*self.checkpoint)
        else:
            self._epoch += 1
            self.network.epoch_callback()
            yield dict(costs)
    if valid_set and self.config.get("save_best_parameters", True):
        self.set_params(*self.best_params)
    if test_set:
        self._run_test(-1, test_set)
[ "Train", "the", "model", "and", "return", "costs", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/base.py#L187-L236
[ "def", "train", "(", "self", ",", "train_set", ",", "valid_set", "=", "None", ",", "test_set", "=", "None", ",", "train_size", "=", "None", ")", ":", "self", ".", "_epoch", "=", "0", "while", "True", ":", "if", "self", ".", "_skip_epochs", ">", "0", ":", "logging", ".", "info", "(", "\"skipping one epoch ...\"", ")", "self", ".", "_skip_epochs", "-=", "1", "self", ".", "_epoch", "+=", "1", "yield", "None", "continue", "# Test", "if", "not", "self", ".", "_epoch", "%", "self", ".", "config", ".", "test_frequency", "and", "test_set", ":", "try", ":", "self", ".", "_run_test", "(", "self", ".", "_epoch", ",", "test_set", ")", "except", "KeyboardInterrupt", ":", "logging", ".", "info", "(", "'interrupted!'", ")", "break", "# Validate", "if", "not", "self", ".", "_epoch", "%", "self", ".", "validation_frequency", "and", "valid_set", ":", "try", ":", "if", "not", "self", ".", "_run_valid", "(", "self", ".", "_epoch", ",", "valid_set", ")", ":", "logging", ".", "info", "(", "'patience elapsed, bailing out'", ")", "break", "except", "KeyboardInterrupt", ":", "logging", ".", "info", "(", "'interrupted!'", ")", "break", "# Train one step", "try", ":", "costs", "=", "self", ".", "_run_train", "(", "self", ".", "_epoch", ",", "train_set", ",", "train_size", ")", "except", "KeyboardInterrupt", ":", "logging", ".", "info", "(", "'interrupted!'", ")", "break", "# Check costs", "if", "np", ".", "isnan", "(", "costs", "[", "0", "]", "[", "1", "]", ")", ":", "logging", ".", "info", "(", "\"NaN detected in costs, rollback to last parameters\"", ")", "self", ".", "set_params", "(", "*", "self", ".", "checkpoint", ")", "else", ":", "self", ".", "_epoch", "+=", "1", "self", ".", "network", ".", "epoch_callback", "(", ")", "yield", "dict", "(", "costs", ")", "if", "valid_set", "and", "self", ".", "config", ".", "get", "(", "\"save_best_parameters\"", ",", "True", ")", ":", "self", ".", "set_params", "(", "*", "self", ".", "best_params", ")", "if", "test_set", ":", "self", ".", "_run_test", "(", "-", "1", ",", "test_set", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
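Because `train` is a generator, a caller drives it with a plain `for` loop, which is exactly what `run` does further below. A self-contained toy mirroring the yield contract (one dict of costs per epoch, `None` for a skipped epoch):

def toy_train():
    # Hypothetical stand-in for NeuralTrainer.train's yield behaviour.
    yield None  # a skipped epoch yields None
    for j in (0.9, 0.5, 0.2):
        yield {"J": j}

for costs in toy_train():
    if costs is None:
        continue
    print("epoch costs:", costs)
    if costs["J"] < 0.3:
        break  # early stopping simply abandons the generator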
test
NeuralTrainer._run_train
Run one training iteration.
deepy/trainers/base.py
def _run_train(self, epoch, train_set, train_size=None):
    """
    Run one training iteration.
    """
    self.network.train_logger.record_epoch(epoch + 1)
    costs = self.train_step(train_set, train_size)
    if not epoch % self.config.monitor_frequency:
        self.report(dict(costs), "train", epoch)
    self.last_run_costs = costs
    return costs
[ "Run", "one", "training", "iteration", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/base.py#L254-L263
[ "def", "_run_train", "(", "self", ",", "epoch", ",", "train_set", ",", "train_size", "=", "None", ")", ":", "self", ".", "network", ".", "train_logger", ".", "record_epoch", "(", "epoch", "+", "1", ")", "costs", "=", "self", ".", "train_step", "(", "train_set", ",", "train_size", ")", "if", "not", "epoch", "%", "self", ".", "config", ".", "monitor_frequency", ":", "self", ".", "report", "(", "dict", "(", "costs", ")", ",", "\"train\"", ",", "epoch", ")", "self", ".", "last_run_costs", "=", "costs", "return", "costs" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralTrainer._run_valid
Run one validation iteration; return True if training should continue.
deepy/trainers/base.py
def _run_valid(self, epoch, valid_set, dry_run=False, save_path=None):
    """
    Run one validation iteration; return True if training should continue.
    """
    costs = self.valid_step(valid_set)
    # this is the same as: (J_i - J_f) / J_i > min improvement
    _, J = costs[0]
    new_best = False
    if self.best_cost - J > self.best_cost * self.min_improvement:
        # save the best cost and parameters
        self.best_params = self.copy_params()
        new_best = True
        if not dry_run:
            self.best_cost = J
            self.best_epoch = epoch
            self.save_checkpoint(save_path)
    self.report(dict(costs), type="valid", epoch=0 if dry_run else epoch, new_best=new_best)
    self.last_run_costs = costs
    return epoch - self.best_epoch < self.patience
[ "Run", "one", "valid", "iteration", "return", "true", "if", "to", "continue", "training", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/base.py#L265-L284
[ "def", "_run_valid", "(", "self", ",", "epoch", ",", "valid_set", ",", "dry_run", "=", "False", ",", "save_path", "=", "None", ")", ":", "costs", "=", "self", ".", "valid_step", "(", "valid_set", ")", "# this is the same as: (J_i - J_f) / J_i > min improvement", "_", ",", "J", "=", "costs", "[", "0", "]", "new_best", "=", "False", "if", "self", ".", "best_cost", "-", "J", ">", "self", ".", "best_cost", "*", "self", ".", "min_improvement", ":", "# save the best cost and parameters", "self", ".", "best_params", "=", "self", ".", "copy_params", "(", ")", "new_best", "=", "True", "if", "not", "dry_run", ":", "self", ".", "best_cost", "=", "J", "self", ".", "best_epoch", "=", "epoch", "self", ".", "save_checkpoint", "(", "save_path", ")", "self", ".", "report", "(", "dict", "(", "costs", ")", ",", "type", "=", "\"valid\"", ",", "epoch", "=", "0", "if", "dry_run", "else", "epoch", ",", "new_best", "=", "new_best", ")", "self", ".", "last_run_costs", "=", "costs", "return", "epoch", "-", "self", ".", "best_epoch", "<", "self", ".", "patience" ]
090fbad22a08a809b12951cd0d4984f5bd432698
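To make the improvement test concrete, a small numeric sketch of the same inequality (values are made up): with best_cost = 2.0 and min_improvement = 0.05, a new cost must beat the best by more than 5% relative.

best_cost, min_improvement = 2.00, 0.05
for J in (1.95, 1.85):
    improved = best_cost - J > best_cost * min_improvement
    print(J, improved)
# 1.95 -> False (2.5% relative improvement < 5%)
# 1.85 -> True  (7.5% relative improvement > 5%)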
test
NeuralTrainer.report
Report the scores and record them in the log.
deepy/trainers/base.py
def report(self, score_map, type="valid", epoch=-1, new_best=False):
    """
    Report the scores and record them in the log.
    """
    type_str = type
    if len(type_str) < 5:
        type_str += " " * (5 - len(type_str))
    info = " ".join("%s=%.2f" % el for el in score_map.items())
    current_epoch = epoch if epoch > 0 else self.current_epoch()
    epoch_str = "epoch={}".format(current_epoch + 1)
    if epoch < 0:
        epoch_str = "dryrun"
    sys.stdout.write("\r")
    sys.stdout.flush()
    marker = " *" if new_best else ""
    message = "{} ({}) {}{}".format(type_str, epoch_str, info, marker)
    self.network.train_logger.record(message)
    logging.info(message)
[ "Report", "the", "scores", "and", "record", "them", "in", "the", "log", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/base.py#L293-L310
[ "def", "report", "(", "self", ",", "score_map", ",", "type", "=", "\"valid\"", ",", "epoch", "=", "-", "1", ",", "new_best", "=", "False", ")", ":", "type_str", "=", "type", "if", "len", "(", "type_str", ")", "<", "5", ":", "type_str", "+=", "\" \"", "*", "(", "5", "-", "len", "(", "type_str", ")", ")", "info", "=", "\" \"", ".", "join", "(", "\"%s=%.2f\"", "%", "el", "for", "el", "in", "score_map", ".", "items", "(", ")", ")", "current_epoch", "=", "epoch", "if", "epoch", ">", "0", "else", "self", ".", "current_epoch", "(", ")", "epoch_str", "=", "\"epoch={}\"", ".", "format", "(", "current_epoch", "+", "1", ")", "if", "epoch", "<", "0", ":", "epoch_str", "=", "\"dryrun\"", "sys", ".", "stdout", ".", "write", "(", "\"\\r\"", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "marker", "=", "\" *\"", "if", "new_best", "else", "\"\"", "message", "=", "\"{} ({}) {}{}\"", ".", "format", "(", "type_str", ",", "epoch_str", ",", "info", ",", "marker", ")", "self", ".", "network", ".", "train_logger", ".", "record", "(", "message", ")", "logging", ".", "info", "(", "message", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
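The resulting message layout, reproduced with plain string formatting (the sample values are invented):

score_map = {"J": 0.4213}
info = " ".join("%s=%.2f" % el for el in score_map.items())
message = "{} ({}) {}{}".format("valid", "epoch=3", info, " *")
print(message)  # valid (epoch=3) J=0.42 *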
test
NeuralTrainer.get_data
Get specified split of data.
deepy/trainers/base.py
def get_data(self, data_split="train"):
    """
    Get specified split of data.
    """
    if data_split == 'train':
        return self._current_train_set
    elif data_split == 'valid':
        return self._current_valid_set
    elif data_split == 'test':
        return self._current_test_set
    else:
        return None
[ "Get", "specified", "split", "of", "data", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/base.py#L390-L401
[ "def", "get_data", "(", "self", ",", "data_split", "=", "\"train\"", ")", ":", "if", "data_split", "==", "'train'", ":", "return", "self", ".", "_current_train_set", "elif", "data_split", "==", "'valid'", ":", "return", "self", ".", "_current_valid_set", "elif", "data_split", "==", "'test'", ":", "return", "self", ".", "_current_test_set", "else", ":", "return", "None" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralTrainer.run
Run until the end. :param epoch_controllers: deprecated
deepy/trainers/base.py
def run(self, train_set, valid_set=None, test_set=None, train_size=None, epoch_controllers=None):
    """
    Run until the end.
    :param epoch_controllers: deprecated
    """
    epoch_controllers = epoch_controllers if epoch_controllers else []
    epoch_controllers += self._epoch_controllers
    if isinstance(train_set, Dataset):
        dataset = train_set
        train_set = dataset.train_set()
        valid_set = dataset.valid_set()
        test_set = dataset.test_set()
        train_size = dataset.train_size()
    self._current_train_set = train_set
    self._current_valid_set = valid_set
    self._current_test_set = test_set
    if epoch_controllers:
        for controller in epoch_controllers:
            controller.bind(self)
    timer = Timer()
    for _ in self.train(train_set, valid_set=valid_set, test_set=test_set, train_size=train_size):
        if epoch_controllers:
            for controller in epoch_controllers:
                controller.invoke()
        if self._ended:
            break
    if self._report_time:
        timer.report()
[ "Run", "until", "the", "end", ".", ":", "param", "epoch_controllers", ":", "deprecated" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/base.py#L403-L430
[ "def", "run", "(", "self", ",", "train_set", ",", "valid_set", "=", "None", ",", "test_set", "=", "None", ",", "train_size", "=", "None", ",", "epoch_controllers", "=", "None", ")", ":", "epoch_controllers", "=", "epoch_controllers", "if", "epoch_controllers", "else", "[", "]", "epoch_controllers", "+=", "self", ".", "_epoch_controllers", "if", "isinstance", "(", "train_set", ",", "Dataset", ")", ":", "dataset", "=", "train_set", "train_set", "=", "dataset", ".", "train_set", "(", ")", "valid_set", "=", "dataset", ".", "valid_set", "(", ")", "test_set", "=", "dataset", ".", "test_set", "(", ")", "train_size", "=", "dataset", ".", "train_size", "(", ")", "self", ".", "_current_train_set", "=", "train_set", "self", ".", "_current_valid_set", "=", "valid_set", "self", ".", "_current_test_set", "=", "test_set", "if", "epoch_controllers", ":", "for", "controller", "in", "epoch_controllers", ":", "controller", ".", "bind", "(", "self", ")", "timer", "=", "Timer", "(", ")", "for", "_", "in", "self", ".", "train", "(", "train_set", ",", "valid_set", "=", "valid_set", ",", "test_set", "=", "test_set", ",", "train_size", "=", "train_size", ")", ":", "if", "epoch_controllers", ":", "for", "controller", "in", "epoch_controllers", ":", "controller", ".", "invoke", "(", ")", "if", "self", ".", "_ended", ":", "break", "if", "self", ".", "_report_time", ":", "timer", ".", "report", "(", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
BunchSequences._cut_to_pieces
:type bunch_stack: list of list of int
deepy/dataset/bunch_seq.py
def _cut_to_pieces(self, bunch_stack):
    """
    :type bunch_stack: list of list of int
    """
    stack_len = len(bunch_stack[0])
    for i in xrange(0, stack_len, self.fragment_length):
        yield np.array(map(lambda stack: stack[i: i + self.fragment_length], bunch_stack))
[ ":", "type", "bunch_stack", ":", "list", "of", "list", "of", "int" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/dataset/bunch_seq.py#L58-L64
[ "def", "_cut_to_pieces", "(", "self", ",", "bunch_stack", ")", ":", "stack_len", "=", "len", "(", "bunch_stack", "[", "0", "]", ")", "for", "i", "in", "xrange", "(", "0", ",", "stack_len", ",", "self", ".", "fragment_length", ")", ":", "yield", "np", ".", "array", "(", "map", "(", "lambda", "stack", ":", "stack", "[", "i", ":", "i", "+", "self", ".", "fragment_length", "]", ",", "bunch_stack", ")", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
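What the fragmenting loop produces for a toy stack with fragment_length = 3; this sketch replaces the Python 2 `map`/`xrange` idioms with a list comprehension and `range` so it also runs on Python 3.

import numpy as np

bunch_stack = [[1, 2, 3, 4, 5, 6, 7],
               [8, 9, 10, 11, 12, 13, 14]]
fragment_length = 3
for i in range(0, len(bunch_stack[0]), fragment_length):
    piece = np.array([stack[i: i + fragment_length] for stack in bunch_stack])
    print(piece.shape)
# (2, 3), (2, 3), then a short tail of shape (2, 1)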
test
BunchSequences._pad_zeros
:type bunch_stack: list of list
deepy/dataset/bunch_seq.py
def _pad_zeros(self, bunch_stack):
    """
    :type bunch_stack: list of list
    """
    # Note: despite the name, this truncates every stack to the shortest
    # length rather than padding the shorter ones with zeros.
    min_len = min(map(len, bunch_stack))
    for i in range(len(bunch_stack)):
        bunch_stack[i] = bunch_stack[i][:min_len]
[ ":", "type", "bunch_stack", ":", "list", "of", "list" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/dataset/bunch_seq.py#L66-L72
[ "def", "_pad_zeros", "(", "self", ",", "bunch_stack", ")", ":", "min_len", "=", "min", "(", "map", "(", "len", ",", "bunch_stack", ")", ")", "for", "i", "in", "range", "(", "len", "(", "bunch_stack", ")", ")", ":", "bunch_stack", "[", "i", "]", "=", "bunch_stack", "[", "i", "]", "[", ":", "min_len", "]" ]
090fbad22a08a809b12951cd0d4984f5bd432698
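A quick demonstration of the truncation behaviour noted above:

bunch_stack = [[1, 2, 3, 4], [5, 6], [7, 8, 9]]
min_len = min(map(len, bunch_stack))  # 2
for i in range(len(bunch_stack)):
    bunch_stack[i] = bunch_stack[i][:min_len]
print(bunch_stack)  # [[1, 2], [5, 6], [7, 8]]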
test
NeuralVariable.apply
Apply a function to tensors.
deepy/core/neural_var.py
def apply(self, func, dim=None):
    """
    Apply a function to tensors.
    """
    output_dim = dim if dim else self.output_dim
    return NeuralVariable(func(self.tensor), output_dim)
[ "Apply", "a", "function", "to", "tensors", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/neural_var.py#L29-L34
[ "def", "apply", "(", "self", ",", "func", ",", "dim", "=", "None", ")", ":", "output_dim", "=", "dim", "if", "dim", "else", "self", ".", "output_dim", "return", "NeuralVariable", "(", "func", "(", "self", ".", "tensor", ")", ",", "output_dim", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
rprop_core
Rprop optimizer. See http://sci2s.ugr.es/keel/pdf/algorithm/articulo/2003-Neuro-Igel-IRprop+.pdf.
deepy/trainers/cores/rprop.py
def rprop_core(params, gradients, rprop_increase=1.01, rprop_decrease=0.99, rprop_min_step=0, rprop_max_step=100,
               learning_rate=0.01):
    """
    Rprop optimizer.
    See http://sci2s.ugr.es/keel/pdf/algorithm/articulo/2003-Neuro-Igel-IRprop+.pdf.
    """
    for param, grad in zip(params, gradients):
        grad_tm1 = theano.shared(np.zeros_like(param.get_value()), name=param.name + '_grad')
        step_tm1 = theano.shared(np.zeros_like(param.get_value()) + learning_rate, name=param.name + '_step')
        test = grad * grad_tm1
        same = T.gt(test, 0)
        diff = T.lt(test, 0)
        step = T.minimum(rprop_max_step, T.maximum(rprop_min_step, step_tm1 * (
            T.eq(test, 0) + same * rprop_increase + diff * rprop_decrease)))
        grad = grad - diff * grad
        yield param, param - T.sgn(grad) * step
        yield grad_tm1, grad
        yield step_tm1, step
[ "Rprop", "optimizer", ".", "See", "http", ":", "//", "sci2s", ".", "ugr", ".", "es", "/", "keel", "/", "pdf", "/", "algorithm", "/", "articulo", "/", "2003", "-", "Neuro", "-", "Igel", "-", "IRprop", "+", ".", "pdf", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/cores/rprop.py#L8-L28
[ "def", "rprop_core", "(", "params", ",", "gradients", ",", "rprop_increase", "=", "1.01", ",", "rprop_decrease", "=", "0.99", ",", "rprop_min_step", "=", "0", ",", "rprop_max_step", "=", "100", ",", "learning_rate", "=", "0.01", ")", ":", "for", "param", ",", "grad", "in", "zip", "(", "params", ",", "gradients", ")", ":", "grad_tm1", "=", "theano", ".", "shared", "(", "np", ".", "zeros_like", "(", "param", ".", "get_value", "(", ")", ")", ",", "name", "=", "param", ".", "name", "+", "'_grad'", ")", "step_tm1", "=", "theano", ".", "shared", "(", "np", ".", "zeros_like", "(", "param", ".", "get_value", "(", ")", ")", "+", "learning_rate", ",", "name", "=", "param", ".", "name", "+", "'_step'", ")", "test", "=", "grad", "*", "grad_tm1", "same", "=", "T", ".", "gt", "(", "test", ",", "0", ")", "diff", "=", "T", ".", "lt", "(", "test", ",", "0", ")", "step", "=", "T", ".", "minimum", "(", "rprop_max_step", ",", "T", ".", "maximum", "(", "rprop_min_step", ",", "step_tm1", "*", "(", "T", ".", "eq", "(", "test", ",", "0", ")", "+", "same", "*", "rprop_increase", "+", "diff", "*", "rprop_decrease", ")", ")", ")", "grad", "=", "grad", "-", "diff", "*", "grad", "yield", "param", ",", "param", "-", "T", ".", "sgn", "(", "grad", ")", "*", "step", "yield", "grad_tm1", ",", "grad", "yield", "step_tm1", ",", "step" ]
090fbad22a08a809b12951cd0d4984f5bd432698
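One iRprop-style step in plain NumPy, mirroring the sign logic above (a sketch, not the Theano graph): per-weight steps grow while consecutive gradients agree in sign and shrink when they flip.

import numpy as np

def rprop_step(param, grad, grad_tm1, step_tm1,
               inc=1.01, dec=0.99, min_step=0.0, max_step=100.0):
    test = grad * grad_tm1
    same, diff = test > 0, test < 0
    step = np.clip(step_tm1 * ((test == 0) + same * inc + diff * dec),
                   min_step, max_step)
    grad = grad - diff * grad  # zero the gradient where the sign flipped
    return param - np.sign(grad) * step, grad, step

param = np.array([0.5, -0.5])
grad_tm1, step = np.zeros(2), np.full(2, 0.01)
param, grad_tm1, step = rprop_step(param, 2 * param, grad_tm1, step)
print(param, step)  # first step moves each weight by 0.01 against its gradient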
test
GeneralConfig.report
Report usage of training parameters.
deepy/conf/config.py
def report(self):
    """
    Report usage of training parameters.
    """
    if self.logger:
        self.logger.info("accessed parameters:")
        for key in self.used_parameters:
            self.logger.info(" - %s %s" % (key, "(undefined)" if key in self.undefined_parameters else ""))
[ "Report", "usage", "of", "training", "parameters", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/conf/config.py#L39-L46
[ "def", "report", "(", "self", ")", ":", "if", "self", ".", "logger", ":", "self", ".", "logger", ".", "info", "(", "\"accessed parameters:\"", ")", "for", "key", "in", "self", ".", "used_parameters", ":", "self", ".", "logger", ".", "info", "(", "\" - %s %s\"", "%", "(", "key", ",", "\"(undefined)\"", "if", "key", "in", "self", ".", "undefined_parameters", "else", "\"\"", ")", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
GraphBuilder.new_block
Create a parameters block. :param layers: register some layers in the block :param name: specify the name of this block
deepy/core/graph.py
def new_block(self, *layers, **kwargs):
    """
    Create a parameters block.
    :param layers: register some layers in the block
    :param name: specify the name of this block
    """
    from deepy.layers.block import Block
    block = Block(*layers, **kwargs)
    return block
[ "Create", "a", "parameters", "block", ".", ":", "param", "layers", ":", "register", "some", "layers", "in", "the", "block", ":", "param", "name", ":", "specify", "the", "name", "of", "this", "block" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/graph.py#L39-L47
[ "def", "new_block", "(", "self", ",", "*", "layers", ",", "*", "*", "kwargs", ")", ":", "from", "deepy", ".", "layers", ".", "block", "import", "Block", "block", "=", "Block", "(", "*", "layers", ",", "*", "*", "kwargs", ")", "return", "block" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
GraphBuilder.var
An alias of deepy.tensor.var.
deepy/core/graph.py
def var(self, tensor_type, last_dim=0, test_shape=None):
    """
    An alias of deepy.tensor.var.
    """
    from deepy.tensor import var
    return var(tensor_type, last_dim=last_dim, test_shape=test_shape)
[ "An", "alias", "of", "deepy", ".", "tensor", ".", "var", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/graph.py#L49-L54
[ "def", "var", "(", "self", ",", "tensor_type", ",", "last_dim", "=", "0", ",", "test_shape", "=", "None", ")", ":", "from", "deepy", ".", "tensor", "import", "var", "return", "var", "(", "tensor_type", ",", "last_dim", "=", "last_dim", ",", "test_shape", "=", "test_shape", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
GraphBuilder.create_vars_from_data
Create vars given a dataset and set test values. Useful when the dataset is already defined.
deepy/core/graph.py
def create_vars_from_data(self, dataset, split="train"):
    """
    Create vars given a dataset and set test values.
    Useful when the dataset is already defined.
    """
    from deepy.core.neural_var import NeuralVariable
    vars = []
    if split == "valid":
        data_split = dataset.valid_set()
    elif split == "test":
        data_split = dataset.test_set()
    else:
        data_split = dataset.train_set()
    first_data_piece = list(data_split)[0]
    for i, numpy_tensor in enumerate(first_data_piece):
        if numpy_tensor.dtype == "int64":
            numpy_tensor = numpy_tensor.astype("int32")
        if numpy_tensor.dtype == "float64":
            numpy_tensor = numpy_tensor.astype(env.FLOATX)
        type_map = {
            0: "scalar",
            1: "vector",
            2: "matrix",
            3: "tensor3",
            4: "tensor4",
            5: "tensor5",
        }
        tensor_type = type_map[numpy_tensor.ndim] if numpy_tensor.ndim in type_map else type_map[0]
        if numpy_tensor.dtype.kind == "i":
            tensor_type = "i" + tensor_type
        theano_tensor = getattr(TT, tensor_type)("input_{}_{}".format(i + 1, tensor_type))
        last_dim = numpy_tensor.shape[-1]
        var = NeuralVariable(theano_tensor, dim=last_dim)
        var.set_test_value(numpy_tensor)
        vars.append(var)
    return vars
[ "Create", "vars", "given", "a", "dataset", "and", "set", "test", "values", ".", "Useful", "when", "dataset", "is", "already", "defined", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/graph.py#L56-L91
[ "def", "create_vars_from_data", "(", "self", ",", "dataset", ",", "split", "=", "\"train\"", ")", ":", "from", "deepy", ".", "core", ".", "neural_var", "import", "NeuralVariable", "vars", "=", "[", "]", "if", "split", "==", "\"valid\"", ":", "data_split", "=", "dataset", ".", "valid_set", "(", ")", "elif", "split", "==", "\"test\"", ":", "data_split", "=", "dataset", ".", "test_set", "(", ")", "else", ":", "data_split", "=", "dataset", ".", "train_set", "(", ")", "first_data_piece", "=", "list", "(", "data_split", ")", "[", "0", "]", "for", "i", ",", "numpy_tensor", "in", "enumerate", "(", "first_data_piece", ")", ":", "if", "numpy_tensor", ".", "dtype", "==", "\"int64\"", ":", "numpy_tensor", "=", "numpy_tensor", ".", "astype", "(", "\"int32\"", ")", "if", "numpy_tensor", ".", "dtype", "==", "\"float64\"", ":", "numpy_tensor", "=", "numpy_tensor", ".", "astype", "(", "env", ".", "FLOATX", ")", "type_map", "=", "{", "0", ":", "\"scalar\"", ",", "1", ":", "\"vector\"", ",", "2", ":", "\"matrix\"", ",", "3", ":", "\"tensor3\"", ",", "4", ":", "\"tensor4\"", ",", "5", ":", "\"tensor5\"", ",", "}", "tensor_type", "=", "type_map", "[", "numpy_tensor", ".", "ndim", "]", "if", "numpy_tensor", ".", "ndim", "in", "type_map", "else", "type_map", "[", "0", "]", "if", "numpy_tensor", ".", "dtype", ".", "kind", "==", "\"i\"", ":", "tensor_type", "=", "\"i\"", "+", "tensor_type", "theano_tensor", "=", "getattr", "(", "TT", ",", "tensor_type", ")", "(", "\"input_{}_{}\"", ".", "format", "(", "i", "+", "1", ",", "tensor_type", ")", ")", "last_dim", "=", "numpy_tensor", ".", "shape", "[", "-", "1", "]", "var", "=", "NeuralVariable", "(", "theano_tensor", ",", "dim", "=", "last_dim", ")", "var", ".", "set_test_value", "(", "numpy_tensor", ")", "vars", ".", "append", "(", "var", ")", "return", "vars" ]
090fbad22a08a809b12951cd0d4984f5bd432698
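How the ndim/dtype mapping above resolves for a few sample arrays; the dict is copied from the function, the sample shapes are invented.

import numpy as np

type_map = {0: "scalar", 1: "vector", 2: "matrix",
            3: "tensor3", 4: "tensor4", 5: "tensor5"}

for arr in (np.zeros((4, 5), dtype="float32"),
            np.zeros((2, 3, 4), dtype="int32"),
            np.float32(1.0)):
    arr = np.asarray(arr)
    tensor_type = type_map.get(arr.ndim, "scalar")
    if arr.dtype.kind == "i":
        tensor_type = "i" + tensor_type  # integer inputs get an "i" prefix
    print(arr.shape, arr.dtype, "->", tensor_type)
# (4, 5) float32 -> matrix; (2, 3, 4) int32 -> itensor3; () float32 -> scalar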
test
GraphBuilder.scan
A loop function; its usage is identical to the Theano one. :type block: deepy.layers.Block
deepy/core/graph.py
def scan(self, func, sequences=None, outputs=None, non_sequences=None, block=None, **kwargs):
    """
    A loop function; its usage is identical to the Theano one.
    :type block: deepy.layers.Block
    """
    results, updates = Scanner(func, sequences, outputs, non_sequences, neural_computation=True, **kwargs).compute()
    if block and updates:
        if type(updates) == dict:
            updates = updates.items()
        block.register_updates(*updates)
    return results
[ "A", "loop", "function", "the", "usage", "is", "identical", "with", "the", "theano", "one", ".", ":", "type", "block", ":", "deepy", ".", "layers", ".", "Block" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/graph.py#L94-L104
[ "def", "scan", "(", "self", ",", "func", ",", "sequences", "=", "None", ",", "outputs", "=", "None", ",", "non_sequences", "=", "None", ",", "block", "=", "None", ",", "*", "*", "kwargs", ")", ":", "results", ",", "updates", "=", "Scanner", "(", "func", ",", "sequences", ",", "outputs", ",", "non_sequences", ",", "neural_computation", "=", "True", ",", "*", "*", "kwargs", ")", ".", "compute", "(", ")", "if", "block", "and", "updates", ":", "if", "type", "(", "updates", ")", "==", "dict", ":", "updates", "=", "updates", ".", "items", "(", ")", "block", ".", "register_updates", "(", "*", "updates", ")", "return", "results" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
GraphBuilder.loop
Start a loop. Usage: ``` with deepy.graph.loop(sequences={"x": x}, outputs={"o": None}) as vars: vars.o = vars.x + 1 loop_outputs = deepy.graph.loop_outputs() result = loop_outputs.o ```
deepy/core/graph.py
def loop(self, sequences=None, outputs=None, non_sequences=None, block=None, **kwargs):
    """
    Start a loop.
    Usage:
    ```
    with deepy.graph.loop(sequences={"x": x}, outputs={"o": None}) as vars:
        vars.o = vars.x + 1
    loop_outputs = deepy.graph.loop_outputs()
    result = loop_outputs.o
    ```
    """
    from loop import Loop
    return Loop(sequences, outputs, non_sequences, block, **kwargs)
[ "Start", "a", "loop", ".", "Usage", ":", "with", "deepy", ".", "graph", ".", "loop", "(", "sequences", "=", "{", "x", ":", "x", "}", "outputs", "=", "{", "o", ":", "None", "}", ")", "as", "vars", ":", "vars", ".", "o", "=", "vars", ".", "x", "+", "1", "loop_outputs", "=", "deepy", ".", "graph", ".", "loop_outputs", "()", "result", "=", "loop_outputs", ".", "o" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/graph.py#L106-L118
[ "def", "loop", "(", "self", ",", "sequences", "=", "None", ",", "outputs", "=", "None", ",", "non_sequences", "=", "None", ",", "block", "=", "None", ",", "*", "*", "kwargs", ")", ":", "from", "loop", "import", "Loop", "return", "Loop", "(", "sequences", ",", "outputs", ",", "non_sequences", ",", "block", ",", "*", "*", "kwargs", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
GraphBuilder.get_trainer
Get a trainer to optimize the given model. :rtype: deepy.trainers.GeneralNeuralTrainer
deepy/core/graph.py
def get_trainer(self, model, method='sgd', config=None, annealer=None, validator=None):
    """
    Get a trainer to optimize the given model.
    :rtype: deepy.trainers.GeneralNeuralTrainer
    """
    from deepy.trainers import GeneralNeuralTrainer
    return GeneralNeuralTrainer(model, method=method, config=config, annealer=annealer, validator=validator)
[ "Get", "a", "trainer", "to", "optimize", "given", "model", ".", ":", "rtype", ":", "deepy", ".", "trainers", ".", "GeneralNeuralTrainer" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/graph.py#L120-L126
[ "def", "get_trainer", "(", "self", ",", "model", ",", "method", "=", "'sgd'", ",", "config", "=", "None", ",", "annealer", "=", "None", ",", "validator", "=", "None", ")", ":", "from", "deepy", ".", "trainers", "import", "GeneralNeuralTrainer", "return", "GeneralNeuralTrainer", "(", "model", ",", "method", "=", "method", ",", "config", "=", "config", ",", "annealer", "=", "annealer", ",", "validator", "=", "validator", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
GraphBuilder.shared
Create a shared theano scalar value.
deepy/core/graph.py
def shared(self, value, name=None):
    """
    Create a shared theano scalar value.
    """
    if type(value) == int:
        final_value = np.array(value, dtype="int32")
    elif type(value) == float:
        final_value = np.array(value, dtype=env.FLOATX)
    else:
        final_value = value
    return theano.shared(final_value, name=name)
[ "Create", "a", "shared", "theano", "scalar", "value", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/graph.py#L129-L140
[ "def", "shared", "(", "self", ",", "value", ",", "name", "=", "None", ")", ":", "if", "type", "(", "value", ")", "==", "int", ":", "final_value", "=", "np", ".", "array", "(", "value", ",", "dtype", "=", "\"int32\"", ")", "elif", "type", "(", "value", ")", "==", "float", ":", "final_value", "=", "np", ".", "array", "(", "value", ",", "dtype", "=", "env", ".", "FLOATX", ")", "else", ":", "final_value", "=", "value", "return", "theano", ".", "shared", "(", "final_value", ",", "name", "=", "name", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
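The dtype dispatch in action, assuming a working Theano install and FLOATX = float32 (a sketch of equivalent calls, not deepy's GraphBuilder):

import numpy as np
import theano

counter = theano.shared(np.array(0, dtype="int32"), name="counter")  # int path
rate = theano.shared(np.array(0.01, dtype="float32"), name="rate")   # float path
counter.set_value(counter.get_value() + 1)
print(counter.get_value(), rate.get_value())  # 1 0.01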
test
GraphBuilder.fill_parameters
Load parameters from file to fill all blocks sequentially. :type blocks: list of deepy.layers.Block
deepy/core/graph.py
def fill_parameters(self, path, blocks, exclude_free_params=False, check_parameters=False):
    """
    Load parameters from file to fill all blocks sequentially.
    :type blocks: list of deepy.layers.Block
    """
    if not os.path.exists(path):
        raise Exception("model {} does not exist".format(path))
    # Decide which parameters to load
    normal_params = sum([nn.parameters for nn in blocks], [])
    all_params = sum([nn.all_parameters for nn in blocks], [])
    # Load parameters
    if path.endswith(".gz"):
        # Since this branch already requires a ".gz" suffix, opener always
        # resolves to gzip.open here.
        opener = gzip.open if path.lower().endswith('.gz') else open
        handle = opener(path, 'rb')
        saved_params = pickle.load(handle)
        handle.close()
        # Write parameters
        if len(all_params) != len(saved_params):
            logging.warning("parameters in the network: {}, parameters in the dumped model: {}".format(
                len(all_params), len(saved_params)))
        for target, source in zip(all_params, saved_params):
            if not exclude_free_params or target not in normal_params:
                target.set_value(source)
    elif path.endswith(".npz"):
        arrs = np.load(path)
        # Write parameters
        if len(all_params) != len(arrs.keys()):
            logging.warning("parameters in the network: {}, parameters in the dumped model: {}".format(
                len(all_params), len(arrs.keys())))
        for target, idx in zip(all_params, range(len(arrs.keys()))):
            if not exclude_free_params or target not in normal_params:
                source = arrs['arr_%d' % idx]
                target.set_value(source)
    else:
        raise Exception("File format of %s is not supported, use '.gz' or '.npz' or '.uncompressed.gz'" % path)
[ "Load", "parameters", "from", "file", "to", "fill", "all", "blocks", "sequentially", ".", ":", "type", "blocks", ":", "list", "of", "deepy", ".", "layers", ".", "Block" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/core/graph.py#L158-L194
[ "def", "fill_parameters", "(", "self", ",", "path", ",", "blocks", ",", "exclude_free_params", "=", "False", ",", "check_parameters", "=", "False", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "raise", "Exception", "(", "\"model {} does not exist\"", ".", "format", "(", "path", ")", ")", "# Decide which parameters to load", "normal_params", "=", "sum", "(", "[", "nn", ".", "parameters", "for", "nn", "in", "blocks", "]", ",", "[", "]", ")", "all_params", "=", "sum", "(", "[", "nn", ".", "all_parameters", "for", "nn", "in", "blocks", "]", ",", "[", "]", ")", "# Load parameters", "if", "path", ".", "endswith", "(", "\".gz\"", ")", ":", "opener", "=", "gzip", ".", "open", "if", "path", ".", "lower", "(", ")", ".", "endswith", "(", "'.gz'", ")", "else", "open", "handle", "=", "opener", "(", "path", ",", "'rb'", ")", "saved_params", "=", "pickle", ".", "load", "(", "handle", ")", "handle", ".", "close", "(", ")", "# Write parameters", "if", "len", "(", "all_params", ")", "!=", "len", "(", "saved_params", ")", ":", "logging", ".", "warning", "(", "\"parameters in the network: {}, parameters in the dumped model: {}\"", ".", "format", "(", "len", "(", "all_params", ")", ",", "len", "(", "saved_params", ")", ")", ")", "for", "target", ",", "source", "in", "zip", "(", "all_params", ",", "saved_params", ")", ":", "if", "not", "exclude_free_params", "or", "target", "not", "in", "normal_params", ":", "target", ".", "set_value", "(", "source", ")", "elif", "path", ".", "endswith", "(", "\".npz\"", ")", ":", "arrs", "=", "np", ".", "load", "(", "path", ")", "# Write parameters", "if", "len", "(", "all_params", ")", "!=", "len", "(", "arrs", ".", "keys", "(", ")", ")", ":", "logging", ".", "warning", "(", "\"parameters in the network: {}, parameters in the dumped model: {}\"", ".", "format", "(", "len", "(", "all_params", ")", ",", "len", "(", "arrs", ".", "keys", "(", ")", ")", ")", ")", "for", "target", ",", "idx", "in", "zip", "(", "all_params", ",", "range", "(", "len", "(", "arrs", ".", "keys", "(", ")", ")", ")", ")", ":", "if", "not", "exclude_free_params", "or", "target", "not", "in", "normal_params", ":", "source", "=", "arrs", "[", "'arr_%d'", "%", "idx", "]", "target", ".", "set_value", "(", "source", ")", "else", ":", "raise", "Exception", "(", "\"File format of %s is not supported, use '.gz' or '.npz' or '.uncompressed.gz'\"", "%", "path", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
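The '.npz' branch above loads arrays positionally under NumPy's default keys arr_0, arr_1, ..., so a compatible dump is just np.savez over the parameter values in all_parameters order. A minimal sketch with plain NumPy (the file name is illustrative):

import numpy as np

# Parameter arrays, in the same order as the blocks' all_parameters.
weights = [np.zeros((3, 4), dtype="float32"), np.zeros(4, dtype="float32")]
np.savez("model.npz", *weights)  # positional arrays are stored as arr_0, arr_1, ...

arrs = np.load("model.npz")
assert sorted(arrs.keys()) == ["arr_0", "arr_1"]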
test
Dataset.train_size
Return size of training data. (optional) :rtype: number
deepy/dataset/dataset.py
def train_size(self): """ Return size of training data. (optional) :rtype: number """ train_set = self.train_set() if isinstance(train_set, collections.Iterable): return len(list(train_set)) else: return None
def train_size(self): """ Return size of training data. (optional) :rtype: number """ train_set = self.train_set() if isinstance(train_set, collections.Iterable): return len(list(train_set)) else: return None
[ "Return", "size", "of", "training", "data", ".", "(", "optional", ")", ":", "rtype", ":", "number" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/dataset/dataset.py#L29-L38
[ "def", "train_size", "(", "self", ")", ":", "train_set", "=", "self", ".", "train_set", "(", ")", "if", "isinstance", "(", "train_set", ",", "collections", ".", "Iterable", ")", ":", "return", "len", "(", "list", "(", "train_set", ")", ")", "else", ":", "return", "None" ]
090fbad22a08a809b12951cd0d4984f5bd432698
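train_size counts by materializing the whole iterable, so it consumes one-shot generators and costs O(n) memory. The same check in isolation (the original uses collections.Iterable, which newer Python moves to collections.abc):

from collections.abc import Iterable  # collections.Iterable in the Python 2 original

def size_of(train_set):
    # Mirrors Dataset.train_size: count only if iterable, else None.
    if isinstance(train_set, Iterable):
        return len(list(train_set))  # materializes (and consumes) the iterable
    return None

assert size_of(iter(range(5))) == 5
assert size_of(None) is None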
test
LearningRateAnnealer.invoke
Run it, return whether to end training.
deepy/trainers/annealers.py
def invoke(self): """ Run it, return whether to end training. """ self._iter += 1 if self._iter - max(self._trainer.best_iter, self._annealed_iter) >= self._patience: if self._annealed_times >= self._anneal_times: logging.info("ending") self._trainer.exit() else: self._trainer.set_params(*self._trainer.best_params) self._learning_rate.set_value(self._learning_rate.get_value() * 0.5) self._annealed_times += 1 self._annealed_iter = self._iter logging.info("annealed learning rate to %f" % self._learning_rate.get_value())
def invoke(self): """ Run it, return whether to end training. """ self._iter += 1 if self._iter - max(self._trainer.best_iter, self._annealed_iter) >= self._patience: if self._annealed_times >= self._anneal_times: logging.info("ending") self._trainer.exit() else: self._trainer.set_params(*self._trainer.best_params) self._learning_rate.set_value(self._learning_rate.get_value() * 0.5) self._annealed_times += 1 self._annealed_iter = self._iter logging.info("annealed learning rate to %f" % self._learning_rate.get_value())
[ "Run", "it", "return", "whether", "to", "end", "training", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/annealers.py#L37-L51
[ "def", "invoke", "(", "self", ")", ":", "self", ".", "_iter", "+=", "1", "if", "self", ".", "_iter", "-", "max", "(", "self", ".", "_trainer", ".", "best_iter", ",", "self", ".", "_annealed_iter", ")", ">=", "self", ".", "_patience", ":", "if", "self", ".", "_annealed_times", ">=", "self", ".", "_anneal_times", ":", "logging", ".", "info", "(", "\"ending\"", ")", "self", ".", "_trainer", ".", "exit", "(", ")", "else", ":", "self", ".", "_trainer", ".", "set_params", "(", "*", "self", ".", "_trainer", ".", "best_params", ")", "self", ".", "_learning_rate", ".", "set_value", "(", "self", ".", "_learning_rate", ".", "get_value", "(", ")", "*", "0.5", ")", "self", ".", "_annealed_times", "+=", "1", "self", ".", "_annealed_iter", "=", "self", ".", "_iter", "logging", ".", "info", "(", "\"annealed learning rate to %f\"", "%", "self", ".", "_learning_rate", ".", "get_value", "(", ")", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
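The annealer restores the best parameters and halves the learning rate whenever no new best iteration arrives within the patience window, exiting after _anneal_times halvings. The resulting rate schedule is easy to verify by hand:

initial_lr = 0.1
anneal_times = 3  # plays the role of the annealer's _anneal_times cap

# After k anneals the effective rate is initial_lr * 0.5 ** k.
rates = [initial_lr * 0.5 ** k for k in range(anneal_times + 1)]
assert rates == [0.1, 0.05, 0.025, 0.0125]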
test
SimpleScheduler.invoke
Run it, return whether to end training.
deepy/trainers/annealers.py
def invoke(self): """ Run it, return whether to end training. """ self._iter += 1 logging.info("{} epochs left to run".format(self._patience - self._iter)) if self._iter >= self._patience: self._trainer.exit()
def invoke(self): """ Run it, return whether to end training. """ self._iter += 1 logging.info("{} epochs left to run".format(self._patience - self._iter)) if self._iter >= self._patience: self._trainer.exit()
[ "Run", "it", "return", "whether", "to", "end", "training", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/annealers.py#L131-L138
[ "def", "invoke", "(", "self", ")", ":", "self", ".", "_iter", "+=", "1", "logging", ".", "info", "(", "\"{} epochs left to run\"", ".", "format", "(", "self", ".", "_patience", "-", "self", ".", "_iter", ")", ")", "if", "self", ".", "_iter", ">=", "self", ".", "_patience", ":", "self", ".", "_trainer", ".", "exit", "(", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
VariationalAutoEncoder.stack_reparameterization_layer
Perform reparameterization trick for latent variables. :param layer_size: the size of latent variable
examples/variational_autoencoder/variational_autoencoder.py
def stack_reparameterization_layer(self, layer_size): """ Perform reparameterization trick for latent variables. :param layer_size: the size of latent variable """ self.rep_layer = ReparameterizationLayer(layer_size, sample=self.sample) self.stack_encoders(self.rep_layer)
def stack_reparameterization_layer(self, layer_size): """ Perform reparameterization trick for latent variables. :param layer_size: the size of latent variable """ self.rep_layer = ReparameterizationLayer(layer_size, sample=self.sample) self.stack_encoders(self.rep_layer)
[ "Perform", "reparameterization", "trick", "for", "latent", "variables", ".", ":", "param", "layer_size", ":", "the", "size", "of", "latent", "variable" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/examples/variational_autoencoder/variational_autoencoder.py#L69-L75
[ "def", "stack_reparameterization_layer", "(", "self", ",", "layer_size", ")", ":", "self", ".", "rep_layer", "=", "ReparameterizationLayer", "(", "layer_size", ",", "sample", "=", "self", ".", "sample", ")", "self", ".", "stack_encoders", "(", "self", ".", "rep_layer", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
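ReparameterizationLayer applies the standard VAE trick: sample z = mu + sigma * eps with eps ~ N(0, I), so gradients flow through mu and sigma while the randomness stays outside the graph. The layer's internals are not shown in this record, so the sketch below is the generic formulation in NumPy, assuming the encoder emits a mean and a log-variance:

import numpy as np

rng = np.random.RandomState(0)
mu = np.zeros(8)       # encoder mean
log_var = np.zeros(8)  # encoder log-variance

eps = rng.normal(size=8)              # noise is sampled, not learned
z = mu + np.exp(0.5 * log_var) * eps  # sigma = exp(log_var / 2)
assert z.shape == (8,)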
test
AutoEncoder.stack_encoders
Stack encoding layers. This must be done before stacking decoding layers.
deepy/networks/auto_encoder.py
def stack_encoders(self, *layers): """ Stack encoding layers. This must be done before stacking decoding layers. """ self.stack(*layers) self.encoding_layes.extend(layers)
def stack_encoders(self, *layers): """ Stack encoding layers. This must be done before stacking decoding layers. """ self.stack(*layers) self.encoding_layes.extend(layers)
[ "Stack", "encoding", "layers", "this", "must", "be", "done", "before", "stacking", "decoding", "layers", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/auto_encoder.py#L41-L46
[ "def", "stack_encoders", "(", "self", ",", "*", "layers", ")", ":", "self", ".", "stack", "(", "*", "layers", ")", "self", ".", "encoding_layes", ".", "extend", "(", "layers", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
AutoEncoder.stack_decoders
Stack decoding layers.
deepy/networks/auto_encoder.py
def stack_decoders(self, *layers): """ Stack decoding layers. """ self.stack(*layers) self.decoding_layers.extend(layers)
def stack_decoders(self, *layers): """ Stack decoding layers. """ self.stack(*layers) self.decoding_layers.extend(layers)
[ "Stack", "decoding", "layers", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/auto_encoder.py#L48-L53
[ "def", "stack_decoders", "(", "self", ",", "*", "layers", ")", ":", "self", ".", "stack", "(", "*", "layers", ")", "self", ".", "decoding_layers", ".", "extend", "(", "layers", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
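Because encode() later replays only encoding_layes and decode() only decoding_layers, the two stacking calls must happen in this order. A hypothetical construction sketch; the import paths, the Dense stand-in layer, and the constructor arguments are assumptions, and all sizes are illustrative:

from deepy.networks import AutoEncoder  # assumed import path
from deepy.layers import Dense          # stand-in for any deepy layer

model = AutoEncoder(input_dim=784, rep_dim=32)  # constructor args assumed
model.stack_encoders(Dense(256), Dense(32))     # encoder half must come first
model.stack_decoders(Dense(256), Dense(784))    # then the decoder half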
test
AutoEncoder.encode
Encode given input.
deepy/networks/auto_encoder.py
def encode(self, x): """ Encode given input. """ if not self.encoding_network: self.encoding_network = NeuralNetwork(self.input_dim, self.input_tensor) self.encoding_network.input_variables = self.input_variables for layer in self.encoding_layes: self.encoding_network.stack_layer(layer, no_setup=True) return self.encoding_network.compute(*x)
def encode(self, x): """ Encode given input. """ if not self.encoding_network: self.encoding_network = NeuralNetwork(self.input_dim, self.input_tensor) self.encoding_network.input_variables = self.input_variables for layer in self.encoding_layes: self.encoding_network.stack_layer(layer, no_setup=True) return self.encoding_network.compute(*x)
[ "Encode", "given", "input", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/auto_encoder.py#L55-L64
[ "def", "encode", "(", "self", ",", "x", ")", ":", "if", "not", "self", ".", "encoding_network", ":", "self", ".", "encoding_network", "=", "NeuralNetwork", "(", "self", ".", "input_dim", ",", "self", ".", "input_tensor", ")", "self", ".", "encoding_network", ".", "input_variables", "=", "self", ".", "input_variables", "for", "layer", "in", "self", ".", "encoding_layes", ":", "self", ".", "encoding_network", ".", "stack_layer", "(", "layer", ",", "no_setup", "=", "True", ")", "return", "self", ".", "encoding_network", ".", "compute", "(", "*", "x", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
AutoEncoder.decode
Decode given representation.
deepy/networks/auto_encoder.py
def decode(self, x): """ Decode given representation. """ if not self.rep_dim: raise Exception("rep_dim must be set to decode.") if not self.decoding_network: self.decoding_network = NeuralNetwork(self.rep_dim) for layer in self.decoding_layers: self.decoding_network.stack_layer(layer, no_setup=True) return self.decoding_network.compute(x)
def decode(self, x): """ Decode given representation. """ if not self.rep_dim: raise Exception("rep_dim must be set to decode.") if not self.decoding_network: self.decoding_network = NeuralNetwork(self.rep_dim) for layer in self.decoding_layers: self.decoding_network.stack_layer(layer, no_setup=True) return self.decoding_network.compute(x)
[ "Decode", "given", "representation", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/auto_encoder.py#L66-L76
[ "def", "decode", "(", "self", ",", "x", ")", ":", "if", "not", "self", ".", "rep_dim", ":", "raise", "Exception", "(", "\"rep_dim must be set to decode.\"", ")", "if", "not", "self", ".", "decoding_network", ":", "self", ".", "decoding_network", "=", "NeuralNetwork", "(", "self", ".", "rep_dim", ")", "for", "layer", "in", "self", ".", "decoding_layers", ":", "self", ".", "decoding_network", ".", "stack_layer", "(", "layer", ",", "no_setup", "=", "True", ")", "return", "self", ".", "decoding_network", ".", "compute", "(", "x", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
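With both halves stacked, encode and decode form the two ends of a round trip. Note that encode unpacks its argument into the network inputs (it calls compute(*x)), while decode takes the representation directly. A hypothetical continuation of the sketch above, with illustrative shapes:

import numpy as np

batch = np.random.rand(16, 784).astype("float32")
rep = model.encode([batch])  # the list is unpacked into the network inputs
recon = model.decode(rep)    # raises unless rep_dim was set on the model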
test
create_2d_gaussian
This function creates a 2d gaussian kernel with the standard deviation denoted by sigma. :param dim: integer denoting a side (1-D) of the gaussian kernel :param sigma: floating point indicating the standard deviation :returns: a numpy 2d array
deepy/preprocessing/elastic_distortion.py
def create_2d_gaussian(dim, sigma): """ This function creates a 2d gaussian kernel with the standard deviation denoted by sigma. :param dim: integer denoting a side (1-D) of the gaussian kernel :param sigma: floating point indicating the standard deviation :returns: a numpy 2d array """ # check if the dimension is odd if dim % 2 == 0: raise ValueError("Kernel dimension should be odd") # initialize the kernel kernel = np.zeros((dim, dim), dtype=np.float16) # calculate the center point center = dim/2 # calculate the variance variance = sigma ** 2 # calculate the normalization coefficient coeff = 1. / (2 * variance) # create the kernel for x in range(0, dim): for y in range(0, dim): x_val = abs(x - center) y_val = abs(y - center) numerator = x_val**2 + y_val**2 denom = 2*variance kernel[x,y] = coeff * np.exp(-1. * numerator/denom) return kernel/sum(sum(kernel))
def create_2d_gaussian(dim, sigma): """ This function creates a 2d gaussian kernel with the standard deviation denoted by sigma. :param dim: integer denoting a side (1-D) of the gaussian kernel :param sigma: floating point indicating the standard deviation :returns: a numpy 2d array """ # check if the dimension is odd if dim % 2 == 0: raise ValueError("Kernel dimension should be odd") # initialize the kernel kernel = np.zeros((dim, dim), dtype=np.float16) # calculate the center point center = dim/2 # calculate the variance variance = sigma ** 2 # calculate the normalization coefficient coeff = 1. / (2 * variance) # create the kernel for x in range(0, dim): for y in range(0, dim): x_val = abs(x - center) y_val = abs(y - center) numerator = x_val**2 + y_val**2 denom = 2*variance kernel[x,y] = coeff * np.exp(-1. * numerator/denom) return kernel/sum(sum(kernel))
[ "This", "function", "creates", "a", "2d", "gaussian", "kernel", "with", "the", "standard", "deviation", "denoted", "by", "sigma" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/preprocessing/elastic_distortion.py#L17-L54
[ "def", "create_2d_gaussian", "(", "dim", ",", "sigma", ")", ":", "# check if the dimension is odd", "if", "dim", "%", "2", "==", "0", ":", "raise", "ValueError", "(", "\"Kernel dimension should be odd\"", ")", "# initialize the kernel", "kernel", "=", "np", ".", "zeros", "(", "(", "dim", ",", "dim", ")", ",", "dtype", "=", "np", ".", "float16", ")", "# calculate the center point", "center", "=", "dim", "/", "2", "# calculate the variance", "variance", "=", "sigma", "**", "2", "# calculate the normalization coefficeint", "coeff", "=", "1.", "/", "(", "2", "*", "variance", ")", "# create the kernel", "for", "x", "in", "range", "(", "0", ",", "dim", ")", ":", "for", "y", "in", "range", "(", "0", ",", "dim", ")", ":", "x_val", "=", "abs", "(", "x", "-", "center", ")", "y_val", "=", "abs", "(", "y", "-", "center", ")", "numerator", "=", "x_val", "**", "2", "+", "y_val", "**", "2", "denom", "=", "2", "*", "variance", "kernel", "[", "x", ",", "y", "]", "=", "coeff", "*", "np", ".", "exp", "(", "-", "1.", "*", "numerator", "/", "denom", ")", "return", "kernel", "/", "sum", "(", "sum", "(", "kernel", ")", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
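The kernel is divided by its total mass at the end, so it should sum to one up to float16 rounding. A quick check, assuming the repo is importable:

from deepy.preprocessing.elastic_distortion import create_2d_gaussian

kernel = create_2d_gaussian(dim=7, sigma=2.0)
assert kernel.shape == (7, 7)
assert abs(float(kernel.sum()) - 1.0) < 1e-2  # float16 storage is coarse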
test
elastic_distortion
This method performs elastic transformations on an image by convolving with a gaussian kernel. :param image: a numpy nd array :param kernel_dim: dimension (1-D) of the gaussian kernel :param sigma: standard deviation of the kernel :param alpha: a multiplicative factor for image after convolution :param negated: a flag indicating whether the image is negated or not :returns: a nd array transformed image
deepy/preprocessing/elastic_distortion.py
def elastic_distortion(image, kernel_dim=21, sigma=6, alpha=30, negated=True): """ This method performs elastic transformations on an image by convolving with a gaussian kernel. :param image: a numpy nd array :param kernel_dim: dimension (1-D) of the gaussian kernel :param sigma: standard deviation of the kernel :param alpha: a multiplicative factor for image after convolution :param negated: a flag indicating whether the image is negated or not :returns: a nd array transformed image """ # check if the image is a negated one if not negated: image = 255-image # check if kernel dimension is odd if kernel_dim % 2 == 0: raise ValueError("Kernel dimension should be odd") # create an empty image result = np.zeros(image.shape) # create random displacement fields displacement_field_x = np.array([[env.numpy_rand.random_integers(-1, 1) for x in xrange(image.shape[0])] \ for y in xrange(image.shape[1])]) * alpha displacement_field_y = np.array([[env.numpy_rand.random_integers(-1, 1) for x in xrange(image.shape[0])] \ for y in xrange(image.shape[1])]) * alpha # create the gaussian kernel kernel = create_2d_gaussian(kernel_dim, sigma) # convolve the fields with the gaussian kernel displacement_field_x = convolve2d(displacement_field_x, kernel) displacement_field_y = convolve2d(displacement_field_y, kernel) # make the distorted image by averaging each pixel value to the neighbouring # four pixels based on displacement fields for row in xrange(image.shape[1]): for col in xrange(image.shape[0]): low_ii = row + int(math.floor(displacement_field_x[row, col])) high_ii = row + int(math.ceil(displacement_field_x[row, col])) low_jj = col + int(math.floor(displacement_field_y[row, col])) high_jj = col + int(math.ceil(displacement_field_y[row, col])) if low_ii < 0 or low_jj < 0 or high_ii >= image.shape[1] -1 \ or high_jj >= image.shape[0] - 1: continue res = image[low_ii, low_jj]/4 + image[low_ii, high_jj]/4 + \ image[high_ii, low_jj]/4 + image[high_ii, high_jj]/4 result[row, col] = res return result
def elastic_distortion(image, kernel_dim=21, sigma=6, alpha=30, negated=True): """ This method performs elastic transformations on an image by convolving with a gaussian kernel. :param image: a numpy nd array :param kernel_dim: dimension (1-D) of the gaussian kernel :param sigma: standard deviation of the kernel :param alpha: a multiplicative factor for image after convolution :param negated: a flag indicating whether the image is negated or not :returns: a nd array transformed image """ # check if the image is a negated one if not negated: image = 255-image # check if kernel dimension is odd if kernel_dim % 2 == 0: raise ValueError("Kernel dimension should be odd") # create an empty image result = np.zeros(image.shape) # create random displacement fields displacement_field_x = np.array([[env.numpy_rand.random_integers(-1, 1) for x in xrange(image.shape[0])] \ for y in xrange(image.shape[1])]) * alpha displacement_field_y = np.array([[env.numpy_rand.random_integers(-1, 1) for x in xrange(image.shape[0])] \ for y in xrange(image.shape[1])]) * alpha # create the gaussian kernel kernel = create_2d_gaussian(kernel_dim, sigma) # convolve the fields with the gaussian kernel displacement_field_x = convolve2d(displacement_field_x, kernel) displacement_field_y = convolve2d(displacement_field_y, kernel) # make the distorted image by averaging each pixel value to the neighbouring # four pixels based on displacement fields for row in xrange(image.shape[1]): for col in xrange(image.shape[0]): low_ii = row + int(math.floor(displacement_field_x[row, col])) high_ii = row + int(math.ceil(displacement_field_x[row, col])) low_jj = col + int(math.floor(displacement_field_y[row, col])) high_jj = col + int(math.ceil(displacement_field_y[row, col])) if low_ii < 0 or low_jj < 0 or high_ii >= image.shape[1] -1 \ or high_jj >= image.shape[0] - 1: continue res = image[low_ii, low_jj]/4 + image[low_ii, high_jj]/4 + \ image[high_ii, low_jj]/4 + image[high_ii, high_jj]/4 result[row, col] = res return result
[ "This", "method", "performs", "elastic", "transformations", "on", "an", "image", "by", "convolving", "with", "a", "gaussian", "kernel", ".", ":", "param", "image", ":", "a", "numpy", "nd", "array", ":", "kernel_dim", ":", "dimension", "(", "1", "-", "D", ")", "of", "the", "gaussian", "kernel", ":", "param", "sigma", ":", "standard", "deviation", "of", "the", "kernel", ":", "param", "alpha", ":", "a", "multiplicative", "factor", "for", "image", "after", "convolution", ":", "param", "negated", ":", "a", "flag", "indicating", "whether", "the", "image", "is", "negated", "or", "not", ":", "returns", ":", "a", "nd", "array", "transformed", "image" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/preprocessing/elastic_distortion.py#L57-L112
[ "def", "elastic_distortion", "(", "image", ",", "kernel_dim", "=", "21", ",", "sigma", "=", "6", ",", "alpha", "=", "30", ",", "negated", "=", "True", ")", ":", "# check if the image is a negated one", "if", "not", "negated", ":", "image", "=", "255", "-", "image", "# check if kernel dimesnion is odd", "if", "kernel_dim", "%", "2", "==", "0", ":", "raise", "ValueError", "(", "\"Kernel dimension should be odd\"", ")", "# create an empty image", "result", "=", "np", ".", "zeros", "(", "image", ".", "shape", ")", "# create random displacement fields", "displacement_field_x", "=", "np", ".", "array", "(", "[", "[", "env", ".", "numpy_rand", ".", "random_integers", "(", "-", "1", ",", "1", ")", "for", "x", "in", "xrange", "(", "image", ".", "shape", "[", "0", "]", ")", "]", "for", "y", "in", "xrange", "(", "image", ".", "shape", "[", "1", "]", ")", "]", ")", "*", "alpha", "displacement_field_y", "=", "np", ".", "array", "(", "[", "[", "env", ".", "numpy_rand", ".", "random_integers", "(", "-", "1", ",", "1", ")", "for", "x", "in", "xrange", "(", "image", ".", "shape", "[", "0", "]", ")", "]", "for", "y", "in", "xrange", "(", "image", ".", "shape", "[", "1", "]", ")", "]", ")", "*", "alpha", "# create the gaussian kernel", "kernel", "=", "create_2d_gaussian", "(", "kernel_dim", ",", "sigma", ")", "# convolve the fields with the gaussian kernel", "displacement_field_x", "=", "convolve2d", "(", "displacement_field_x", ",", "kernel", ")", "displacement_field_y", "=", "convolve2d", "(", "displacement_field_y", ",", "kernel", ")", "# make the distortrd image by averaging each pixel value to the neighbouring", "# four pixels based on displacement fields", "for", "row", "in", "xrange", "(", "image", ".", "shape", "[", "1", "]", ")", ":", "for", "col", "in", "xrange", "(", "image", ".", "shape", "[", "0", "]", ")", ":", "low_ii", "=", "row", "+", "int", "(", "math", ".", "floor", "(", "displacement_field_x", "[", "row", ",", "col", "]", ")", ")", "high_ii", "=", "row", "+", "int", "(", "math", ".", "ceil", "(", "displacement_field_x", "[", "row", ",", "col", "]", ")", ")", "low_jj", "=", "col", "+", "int", "(", "math", ".", "floor", "(", "displacement_field_y", "[", "row", ",", "col", "]", ")", ")", "high_jj", "=", "col", "+", "int", "(", "math", ".", "ceil", "(", "displacement_field_y", "[", "row", ",", "col", "]", ")", ")", "if", "low_ii", "<", "0", "or", "low_jj", "<", "0", "or", "high_ii", ">=", "image", ".", "shape", "[", "1", "]", "-", "1", "or", "high_jj", ">=", "image", ".", "shape", "[", "0", "]", "-", "1", ":", "continue", "res", "=", "image", "[", "low_ii", ",", "low_jj", "]", "/", "4", "+", "image", "[", "low_ii", ",", "high_jj", "]", "/", "4", "+", "image", "[", "high_ii", ",", "low_jj", "]", "/", "4", "+", "image", "[", "high_ii", ",", "high_jj", "]", "/", "4", "result", "[", "row", ",", "col", "]", "=", "res", "return", "result" ]
090fbad22a08a809b12951cd0d4984f5bd432698
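Typical use is MNIST-style data augmentation. The call below follows the documented signature on a synthetic image; it assumes the repo's Python 2 environment, since the implementation relies on xrange and the module-level env:

import numpy as np
from deepy.preprocessing.elastic_distortion import elastic_distortion

image = np.zeros((28, 28))
image[10:18, 10:18] = 255.0  # a bright square on black, already "negated"
warped = elastic_distortion(image, kernel_dim=21, sigma=6, alpha=30, negated=True)
assert warped.shape == image.shape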
test
NeuralNetwork.stack_layer
Stack a neural layer. :type layer: NeuralLayer :param no_setup: whether the layer is already initialized
deepy/networks/network.py
def stack_layer(self, layer, no_setup=False): """ Stack a neural layer. :type layer: NeuralLayer :param no_setup: whether the layer is already initialized """ if layer.name: layer.name += "%d" % (len(self.layers) + 1) if not self.layers: layer.init(self.input_dim, no_prepare=no_setup) else: layer.init(self.layers[-1].output_dim, no_prepare=no_setup) self._output = layer.compute_tensor(self._output) self._test_output = layer.compute_tensor(self._test_output) self._hidden_outputs.append(self._output) self.register_layer(layer) self.layers.append(layer)
def stack_layer(self, layer, no_setup=False): """ Stack a neural layer. :type layer: NeuralLayer :param no_setup: whether the layer is already initialized """ if layer.name: layer.name += "%d" % (len(self.layers) + 1) if not self.layers: layer.init(self.input_dim, no_prepare=no_setup) else: layer.init(self.layers[-1].output_dim, no_prepare=no_setup) self._output = layer.compute_tensor(self._output) self._test_output = layer.compute_tensor(self._test_output) self._hidden_outputs.append(self._output) self.register_layer(layer) self.layers.append(layer)
[ "Stack", "a", "neural", "layer", ".", ":", "type", "layer", ":", "NeuralLayer", ":", "param", "no_setup", ":", "whether", "the", "layer", "is", "already", "initialized" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/network.py#L79-L95
[ "def", "stack_layer", "(", "self", ",", "layer", ",", "no_setup", "=", "False", ")", ":", "if", "layer", ".", "name", ":", "layer", ".", "name", "+=", "\"%d\"", "%", "(", "len", "(", "self", ".", "layers", ")", "+", "1", ")", "if", "not", "self", ".", "layers", ":", "layer", ".", "init", "(", "self", ".", "input_dim", ",", "no_prepare", "=", "no_setup", ")", "else", ":", "layer", ".", "init", "(", "self", ".", "layers", "[", "-", "1", "]", ".", "output_dim", ",", "no_prepare", "=", "no_setup", ")", "self", ".", "_output", "=", "layer", ".", "compute_tensor", "(", "self", ".", "_output", ")", "self", ".", "_test_output", "=", "layer", ".", "compute_tensor", "(", "self", ".", "_test_output", ")", "self", ".", "_hidden_outputs", ".", "append", "(", "self", ".", "_output", ")", "self", ".", "register_layer", "(", "layer", ")", "self", ".", "layers", ".", "append", "(", "layer", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralNetwork.register_layer
Register the layer so that its params will be trained. But the output of the layer will not be stacked.
deepy/networks/network.py
def register_layer(self, layer): """ Register the layer so that its params will be trained. But the output of the layer will not be stacked. """ if type(layer) == Block: layer.fix() self.parameter_count += layer.parameter_count self.parameters.extend(layer.parameters) self.free_parameters.extend(layer.free_parameters) self.training_monitors.extend(layer.training_monitors) self.testing_monitors.extend(layer.testing_monitors) self.updates.extend(layer.updates) self.training_updates.extend(layer.training_updates) self.input_variables.extend(layer.external_inputs) self.target_variables.extend(layer.external_targets) self.training_callbacks.extend(layer.training_callbacks) self.testing_callbacks.extend(layer.testing_callbacks) self.epoch_callbacks.extend(layer.epoch_callbacks)
def register_layer(self, layer): """ Register the layer so that its params will be trained. But the output of the layer will not be stacked. """ if type(layer) == Block: layer.fix() self.parameter_count += layer.parameter_count self.parameters.extend(layer.parameters) self.free_parameters.extend(layer.free_parameters) self.training_monitors.extend(layer.training_monitors) self.testing_monitors.extend(layer.testing_monitors) self.updates.extend(layer.updates) self.training_updates.extend(layer.training_updates) self.input_variables.extend(layer.external_inputs) self.target_variables.extend(layer.external_targets) self.training_callbacks.extend(layer.training_callbacks) self.testing_callbacks.extend(layer.testing_callbacks) self.epoch_callbacks.extend(layer.epoch_callbacks)
[ "Register", "the", "layer", "so", "that", "it", "s", "param", "will", "be", "trained", ".", "But", "the", "output", "of", "the", "layer", "will", "not", "be", "stacked", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/network.py#L106-L125
[ "def", "register_layer", "(", "self", ",", "layer", ")", ":", "if", "type", "(", "layer", ")", "==", "Block", ":", "layer", ".", "fix", "(", ")", "self", ".", "parameter_count", "+=", "layer", ".", "parameter_count", "self", ".", "parameters", ".", "extend", "(", "layer", ".", "parameters", ")", "self", ".", "free_parameters", ".", "extend", "(", "layer", ".", "free_parameters", ")", "self", ".", "training_monitors", ".", "extend", "(", "layer", ".", "training_monitors", ")", "self", ".", "testing_monitors", ".", "extend", "(", "layer", ".", "testing_monitors", ")", "self", ".", "updates", ".", "extend", "(", "layer", ".", "updates", ")", "self", ".", "training_updates", ".", "extend", "(", "layer", ".", "training_updates", ")", "self", ".", "input_variables", ".", "extend", "(", "layer", ".", "external_inputs", ")", "self", ".", "target_variables", ".", "extend", "(", "layer", ".", "external_targets", ")", "self", ".", "training_callbacks", ".", "extend", "(", "layer", ".", "training_callbacks", ")", "self", ".", "testing_callbacks", ".", "extend", "(", "layer", ".", "testing_callbacks", ")", "self", ".", "epoch_callbacks", ".", "extend", "(", "layer", ".", "epoch_callbacks", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralNetwork.monitor_layer_outputs
Monitoring the outputs of each layer. Useful for troubleshooting convergence problems.
deepy/networks/network.py
def monitor_layer_outputs(self): """ Monitoring the outputs of each layer. Useful for troubleshooting convergence problems. """ for layer, hidden in zip(self.layers, self._hidden_outputs): self.training_monitors.append(('mean(%s)' % (layer.name), abs(hidden).mean()))
def monitor_layer_outputs(self): """ Monitoring the outputs of each layer. Useful for troubleshooting convergence problems. """ for layer, hidden in zip(self.layers, self._hidden_outputs): self.training_monitors.append(('mean(%s)' % (layer.name), abs(hidden).mean()))
[ "Monitoring", "the", "outputs", "of", "each", "layer", ".", "Useful", "for", "troubleshooting", "convergence", "problems", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/network.py#L147-L153
[ "def", "monitor_layer_outputs", "(", "self", ")", ":", "for", "layer", ",", "hidden", "in", "zip", "(", "self", ".", "layers", ",", "self", ".", "_hidden_outputs", ")", ":", "self", ".", "training_monitors", ".", "append", "(", "(", "'mean(%s)'", "%", "(", "layer", ".", "name", ")", ",", "abs", "(", "hidden", ")", ".", "mean", "(", ")", ")", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralNetwork.all_parameters
Return all parameters.
deepy/networks/network.py
def all_parameters(self): """ Return all parameters. """ params = [] params.extend(self.parameters) params.extend(self.free_parameters) return params
def all_parameters(self): """ Return all parameters. """ params = [] params.extend(self.parameters) params.extend(self.free_parameters) return params
[ "Return", "all", "parameters", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/network.py#L156-L164
[ "def", "all_parameters", "(", "self", ")", ":", "params", "=", "[", "]", "params", ".", "extend", "(", "self", ".", "parameters", ")", "params", ".", "extend", "(", "self", ".", "free_parameters", ")", "return", "params" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralNetwork.setup_variables
Set up variables.
deepy/networks/network.py
def setup_variables(self): """ Set up variables. """ if self.input_tensor: if type(self.input_tensor) == int: x = dim_to_var(self.input_tensor, name="x") else: x = self.input_tensor else: x = T.matrix('x') self.input_variables.append(x) self._output = x self._test_output = x
def setup_variables(self): """ Set up variables. """ if self.input_tensor: if type(self.input_tensor) == int: x = dim_to_var(self.input_tensor, name="x") else: x = self.input_tensor else: x = T.matrix('x') self.input_variables.append(x) self._output = x self._test_output = x
[ "Set", "up", "variables", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/network.py#L166-L179
[ "def", "setup_variables", "(", "self", ")", ":", "if", "self", ".", "input_tensor", ":", "if", "type", "(", "self", ".", "input_tensor", ")", "==", "int", ":", "x", "=", "dim_to_var", "(", "self", ".", "input_tensor", ",", "name", "=", "\"x\"", ")", "else", ":", "x", "=", "self", ".", "input_tensor", "else", ":", "x", "=", "T", ".", "matrix", "(", "'x'", ")", "self", ".", "input_variables", ".", "append", "(", "x", ")", "self", ".", "_output", "=", "x", "self", ".", "_test_output", "=", "x" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralNetwork.compute
Return network output.
deepy/networks/network.py
def compute(self, *x): """ Return network output. """ self._compile() outs = self._compute(*x) if self._output_keys: return MapDict(dict(zip(self._output_keys, outs))) else: return outs
def compute(self, *x): """ Return network output. """ self._compile() outs = self._compute(*x) if self._output_keys: return MapDict(dict(zip(self._output_keys, outs))) else: return outs
[ "Return", "network", "output", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/network.py#L198-L207
[ "def", "compute", "(", "self", ",", "*", "x", ")", ":", "self", ".", "_compile", "(", ")", "outs", "=", "self", ".", "_compute", "(", "*", "x", ")", "if", "self", ".", "_output_keys", ":", "return", "MapDict", "(", "dict", "(", "zip", "(", "self", ".", "_output_keys", ",", "outs", ")", ")", ")", "else", ":", "return", "outs" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralNetwork.save_params
Save parameters to file.
deepy/networks/network.py
def save_params(self, path, new_thread=False): """ Save parameters to file. """ save_logger.info(path) param_variables = self.all_parameters params = [p.get_value().copy() for p in param_variables] if new_thread: thread = Thread(target=save_network_params, args=(params, path)) thread.start() else: save_network_params(params, path) self.train_logger.save(path)
def save_params(self, path, new_thread=False): """ Save parameters to file. """ save_logger.info(path) param_variables = self.all_parameters params = [p.get_value().copy() for p in param_variables] if new_thread: thread = Thread(target=save_network_params, args=(params, path)) thread.start() else: save_network_params(params, path) self.train_logger.save(path)
[ "Save", "parameters", "to", "file", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/network.py#L237-L249
[ "def", "save_params", "(", "self", ",", "path", ",", "new_thread", "=", "False", ")", ":", "save_logger", ".", "info", "(", "path", ")", "param_variables", "=", "self", ".", "all_parameters", "params", "=", "[", "p", ".", "get_value", "(", ")", ".", "copy", "(", ")", "for", "p", "in", "param_variables", "]", "if", "new_thread", ":", "thread", "=", "Thread", "(", "target", "=", "save_network_params", ",", "args", "=", "(", "params", ",", "path", ")", ")", "thread", ".", "start", "(", ")", "else", ":", "save_network_params", "(", "params", ",", "path", ")", "self", ".", "train_logger", ".", "save", "(", "path", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
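With new_thread=True the pickling happens off the training thread, but only after the parameter values were copied on the caller's side. The same fire-and-forget pattern in isolation; save_arrays is a hypothetical stand-in for save_network_params:

from threading import Thread
import numpy as np

def save_arrays(params, path):
    np.savez(path, *params)  # hypothetical stand-in for save_network_params

params = [p.copy() for p in [np.ones((2, 2))]]  # copy first, as save_params does
t = Thread(target=save_arrays, args=(params, "snapshot.npz"))
t.start()
t.join()  # join only if the caller needs to wait for the dump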
test
NeuralNetwork.load_params
Load parameters from file.
deepy/networks/network.py
def load_params(self, path, exclude_free_params=False): """ Load parameters from file. """ if not os.path.exists(path): return; logging.info("loading parameters from %s" % path) # Decide which parameters to load if exclude_free_params: params_to_load = self.parameters else: params_to_load = self.all_parameters # Load parameters if path.endswith(".gz"): opener = gzip.open if path.lower().endswith('.gz') else open handle = opener(path, 'rb') saved_params = pickle.load(handle) handle.close() # Write parameters for target, source in zip(params_to_load, saved_params): logging.info('%s: setting value %s', target.name, source.shape) target.set_value(source) elif path.endswith(".npz"): arrs = np.load(path) # Write parameters for target, idx in zip(params_to_load, range(len(arrs.keys()))): source = arrs['arr_%d' % idx] logging.info('%s: setting value %s', target.name, source.shape) target.set_value(source) else: raise Exception("File format of %s is not supported, use '.gz' or '.npz' or '.uncompressed.gz'" % path) self.train_logger.load(path)
def load_params(self, path, exclude_free_params=False): """ Load parameters from file. """ if not os.path.exists(path): return; logging.info("loading parameters from %s" % path) # Decide which parameters to load if exclude_free_params: params_to_load = self.parameters else: params_to_load = self.all_parameters # Load parameters if path.endswith(".gz"): opener = gzip.open if path.lower().endswith('.gz') else open handle = opener(path, 'rb') saved_params = pickle.load(handle) handle.close() # Write parameters for target, source in zip(params_to_load, saved_params): logging.info('%s: setting value %s', target.name, source.shape) target.set_value(source) elif path.endswith(".npz"): arrs = np.load(path) # Write parameters for target, idx in zip(params_to_load, range(len(arrs.keys()))): source = arrs['arr_%d' % idx] logging.info('%s: setting value %s', target.name, source.shape) target.set_value(source) else: raise Exception("File format of %s is not supported, use '.gz' or '.npz' or '.uncompressed.gz'" % path) self.train_logger.load(path)
[ "Load", "parameters", "from", "file", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/network.py#L251-L282
[ "def", "load_params", "(", "self", ",", "path", ",", "exclude_free_params", "=", "False", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "return", "logging", ".", "info", "(", "\"loading parameters from %s\"", "%", "path", ")", "# Decide which parameters to load", "if", "exclude_free_params", ":", "params_to_load", "=", "self", ".", "parameters", "else", ":", "params_to_load", "=", "self", ".", "all_parameters", "# Load parameters", "if", "path", ".", "endswith", "(", "\".gz\"", ")", ":", "opener", "=", "gzip", ".", "open", "if", "path", ".", "lower", "(", ")", ".", "endswith", "(", "'.gz'", ")", "else", "open", "handle", "=", "opener", "(", "path", ",", "'rb'", ")", "saved_params", "=", "pickle", ".", "load", "(", "handle", ")", "handle", ".", "close", "(", ")", "# Write parameters", "for", "target", ",", "source", "in", "zip", "(", "params_to_load", ",", "saved_params", ")", ":", "logging", ".", "info", "(", "'%s: setting value %s'", ",", "target", ".", "name", ",", "source", ".", "shape", ")", "target", ".", "set_value", "(", "source", ")", "elif", "path", ".", "endswith", "(", "\".npz\"", ")", ":", "arrs", "=", "np", ".", "load", "(", "path", ")", "# Write parameters", "for", "target", ",", "idx", "in", "zip", "(", "params_to_load", ",", "range", "(", "len", "(", "arrs", ".", "keys", "(", ")", ")", ")", ")", ":", "source", "=", "arrs", "[", "'arr_%d'", "%", "idx", "]", "logging", ".", "info", "(", "'%s: setting value %s'", ",", "target", ".", "name", ",", "source", ".", "shape", ")", "target", ".", "set_value", "(", "source", ")", "else", ":", "raise", "Exception", "(", "\"File format of %s is not supported, use '.gz' or '.npz' or '.uncompressed.gz'\"", "%", "path", ")", "self", ".", "train_logger", ".", "load", "(", "path", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
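The '.gz' branch is plain pickle under gzip, so a compatible file can be produced and inspected by hand:

import gzip
import pickle

params = [[1.0, 2.0], [3.0]]  # stand-ins for parameter arrays
with gzip.open("model.gz", "wb") as handle:
    pickle.dump(params, handle)

with gzip.open("model.gz", "rb") as handle:
    assert pickle.load(handle) == params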
test
NeuralNetwork.report
Print network statistics.
deepy/networks/network.py
def report(self): """ Print network statistics. """ logging.info("network inputs: %s", " ".join(map(str, self.input_variables))) logging.info("network targets: %s", " ".join(map(str, self.target_variables))) logging.info("network parameters: %s", " ".join(map(str, self.all_parameters))) logging.info("parameter count: %d", self.parameter_count)
def report(self): """ Print network statistics. """ logging.info("network inputs: %s", " ".join(map(str, self.input_variables))) logging.info("network targets: %s", " ".join(map(str, self.target_variables))) logging.info("network parameters: %s", " ".join(map(str, self.all_parameters))) logging.info("parameter count: %d", self.parameter_count)
[ "Print", "network", "statistics", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/networks/network.py#L284-L291
[ "def", "report", "(", "self", ")", ":", "logging", ".", "info", "(", "\"network inputs: %s\"", ",", "\" \"", ".", "join", "(", "map", "(", "str", ",", "self", ".", "input_variables", ")", ")", ")", "logging", ".", "info", "(", "\"network targets: %s\"", ",", "\" \"", ".", "join", "(", "map", "(", "str", ",", "self", ".", "target_variables", ")", ")", ")", "logging", ".", "info", "(", "\"network parameters: %s\"", ",", "\" \"", ".", "join", "(", "map", "(", "str", ",", "self", ".", "all_parameters", ")", ")", ")", "logging", ".", "info", "(", "\"parameter count: %d\"", ",", "self", ".", "parameter_count", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralLayer.init
Initialize the layer. :param no_prepare: avoid calling preparation function
deepy/layers/layer.py
def init(self, input_dim=0, input_dims=None, no_prepare=False): """ Initialize the layer. :param no_prepare: avoid calling preparation function """ if self.initialized: return # configure input dimensions if input_dims: self.input_dims = input_dims self.input_dim = input_dims[0] else: self.input_dim = input_dim self.input_dims = [input_dims] # set default output dimension if self.output_dim == 0: self.output_dim = self.input_dim self.initialized = True # call prepare if not no_prepare: self.prepare() return self
def init(self, input_dim=0, input_dims=None, no_prepare=False): """ Initialize the layer. :param no_prepare: avoid calling preparation function """ if self.initialized: return # configure input dimensions if input_dims: self.input_dims = input_dims self.input_dim = input_dims[0] else: self.input_dim = input_dim self.input_dims = [input_dims] # set default output dimension if self.output_dim == 0: self.output_dim = self.input_dim self.initialized = True # call prepare if not no_prepare: self.prepare() return self
[ "Initialize", "the", "layer", ".", ":", "param", "no_prepare", ":", "avoid", "calling", "preparation", "function" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/layer.py#L47-L68
[ "def", "init", "(", "self", ",", "input_dim", "=", "0", ",", "input_dims", "=", "None", ",", "no_prepare", "=", "False", ")", ":", "if", "self", ".", "initialized", ":", "return", "# configure input dimensions", "if", "input_dims", ":", "self", ".", "input_dims", "=", "input_dims", "self", ".", "input_dim", "=", "input_dims", "[", "0", "]", "else", ":", "self", ".", "input_dim", "=", "input_dim", "self", ".", "input_dims", "=", "[", "input_dims", "]", "# set default output dimension", "if", "self", ".", "output_dim", "==", "0", ":", "self", ".", "output_dim", "=", "self", ".", "input_dim", "self", ".", "initialized", "=", "True", "# call prepare", "if", "not", "no_prepare", ":", "self", ".", "prepare", "(", ")", "return", "self" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralLayer.compute
Compute based on NeuralVariable. :type inputs: list of NeuralVariable :return: NeuralVariable
deepy/layers/layer.py
def compute(self, *inputs, **kwargs): """ Compute based on NeuralVariable. :type inputs: list of NeuralVariable :return: NeuralVariable """ from deepy.core.neural_var import NeuralVariable from deepy.core.graph import graph if type(inputs[0]) != NeuralVariable: raise SystemError("The input of `compute` must be NeuralVar") dims = [t.dim() for t in inputs] if len(inputs) == 1: self.init(input_dim=dims[0]) else: self.init(input_dims=dims) # Check block if self.parameters and not self._linked_block: self.belongs_to(graph.default_block()) # convert kwargs train_kwargs, _, _ = convert_to_theano_var(kwargs) output = self.compute_tensor(*[t.tensor for t in inputs], **train_kwargs) if type(output) != list and type(output) != tuple: return NeuralVariable(output, dim=self.output_dim) else: return [NeuralVariable(*item) for item in zip(output, self.output_dims)]
def compute(self, *inputs, **kwargs): """ Compute based on NeuralVariable. :type inputs: list of NeuralVariable :return: NeuralVariable """ from deepy.core.neural_var import NeuralVariable from deepy.core.graph import graph if type(inputs[0]) != NeuralVariable: raise SystemError("The input of `compute` must be NeuralVar") dims = [t.dim() for t in inputs] if len(inputs) == 1: self.init(input_dim=dims[0]) else: self.init(input_dims=dims) # Check block if self.parameters and not self._linked_block: self.belongs_to(graph.default_block()) # convert kwargs train_kwargs, _, _ = convert_to_theano_var(kwargs) output = self.compute_tensor(*[t.tensor for t in inputs], **train_kwargs) if type(output) != list and type(output) != tuple: return NeuralVariable(output, dim=self.output_dim) else: return [NeuralVariable(*item) for item in zip(output, self.output_dims)]
[ "Compute", "based", "on", "NeuralVariable", ".", ":", "type", "inputs", ":", "list", "of", "NeuralVariable", ":", "return", ":", "NeuralVariable" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/layer.py#L70-L97
[ "def", "compute", "(", "self", ",", "*", "inputs", ",", "*", "*", "kwargs", ")", ":", "from", "deepy", ".", "core", ".", "neural_var", "import", "NeuralVariable", "from", "deepy", ".", "core", ".", "graph", "import", "graph", "if", "type", "(", "inputs", "[", "0", "]", ")", "!=", "NeuralVariable", ":", "raise", "SystemError", "(", "\"The input of `compute` must be NeuralVar\"", ")", "dims", "=", "[", "t", ".", "dim", "(", ")", "for", "t", "in", "inputs", "]", "if", "len", "(", "inputs", ")", "==", "1", ":", "self", ".", "init", "(", "input_dim", "=", "dims", "[", "0", "]", ")", "else", ":", "self", ".", "init", "(", "input_dims", "=", "dims", ")", "# Check block", "if", "self", ".", "parameters", "and", "not", "self", ".", "_linked_block", ":", "self", ".", "belongs_to", "(", "graph", ".", "default_block", "(", ")", ")", "# convert kwargs", "train_kwargs", ",", "_", ",", "_", "=", "convert_to_theano_var", "(", "kwargs", ")", "output", "=", "self", ".", "compute_tensor", "(", "*", "[", "t", ".", "tensor", "for", "t", "in", "inputs", "]", ",", "*", "*", "train_kwargs", ")", "if", "type", "(", "output", ")", "!=", "list", "and", "type", "(", "output", ")", "!=", "tuple", ":", "return", "NeuralVariable", "(", "output", ",", "dim", "=", "self", ".", "output_dim", ")", "else", ":", "return", "[", "NeuralVariable", "(", "*", "item", ")", "for", "item", "in", "zip", "(", "output", ",", "self", ".", "output_dims", ")", "]" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralLayer.belongs_to
Let the given block or network manage the parameters of this layer. :param block: Block or NeuralNetwork :return: NeuralLayer
deepy/layers/layer.py
def belongs_to(self, block): """ Let the given block or network manage the parameters of this layer. :param block: Block or NeuralNetwork :return: NeuralLayer """ if self._linked_block: raise SystemError("The layer {} already belongs to {}".format(self.name, self._linked_block.name)) self._linked_block = block block.register_layer(self) return self
def belongs_to(self, block): """ Let the given block or network manage the parameters of this layer. :param block: Block or NeuralNetwork :return: NeuralLayer """ if self._linked_block: raise SystemError("The layer {} already belongs to {}".format(self.name, self._linked_block.name)) self._linked_block = block block.register_layer(self) return self
[ "Let", "the", "given", "block", "or", "network", "manage", "the", "parameters", "of", "this", "layer", ".", ":", "param", "block", ":", "Block", "or", "NeuralNetwork", ":", "return", ":", "NeuralLayer" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/layer.py#L111-L121
[ "def", "belongs_to", "(", "self", ",", "block", ")", ":", "if", "self", ".", "_linked_block", ":", "raise", "SystemError", "(", "\"The layer {} has already blonged to {}\"", ".", "format", "(", "self", ".", "name", ",", "self", ".", "_linked_block", ".", "name", ")", ")", "self", ".", "_linked_block", "=", "block", "block", ".", "register_layer", "(", "self", ")", "return", "self" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralLayer.register_parameters
Register parameters.
deepy/layers/layer.py
def register_parameters(self, *parameters): """ Register parameters. """ for param in parameters: self.parameter_count += np.prod(param.get_value().shape) self.parameters.extend(parameters)
def register_parameters(self, *parameters): """ Register parameters. """ for param in parameters: self.parameter_count += np.prod(param.get_value().shape) self.parameters.extend(parameters)
[ "Register", "parameters", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/layer.py#L137-L143
[ "def", "register_parameters", "(", "self", ",", "*", "parameters", ")", ":", "for", "param", "in", "parameters", ":", "self", ".", "parameter_count", "+=", "np", ".", "prod", "(", "param", ".", "get_value", "(", ")", ".", "shape", ")", "self", ".", "parameters", ".", "extend", "(", "parameters", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
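parameter_count accumulates np.prod over each parameter's shape, i.e. the number of scalars it holds. The arithmetic for a typical dense layer (weight matrix plus bias):

import numpy as np

shapes = [(100, 50), (50,)]
count = sum(int(np.prod(shape)) for shape in shapes)
assert count == 5050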
test
NeuralLayer.register_updates
Register updates that will be executed in each iteration.
deepy/layers/layer.py
def register_updates(self, *updates): """ Register updates that will be executed in each iteration. """ for key, node in updates: if key not in self._registered_updates: self.updates.append((key, node)) self._registered_updates.add(key)
def register_updates(self, *updates): """ Register updates that will be executed in each iteration. """ for key, node in updates: if key not in self._registered_updates: self.updates.append((key, node)) self._registered_updates.add(key)
[ "Register", "updates", "that", "will", "be", "executed", "in", "each", "iteration", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/layer.py#L151-L158
[ "def", "register_updates", "(", "self", ",", "*", "updates", ")", ":", "for", "key", ",", "node", "in", "updates", ":", "if", "key", "not", "in", "self", ".", "_registered_updates", ":", "self", ".", "updates", ".", "append", "(", "(", "key", ",", "node", ")", ")", "self", ".", "_registered_updates", ".", "add", "(", "key", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralLayer.register_training_updates
Register updates that will only be executed in training phase.
deepy/layers/layer.py
def register_training_updates(self, *updates): """ Register updates that will only be executed in training phase. """ for key, node in updates: if key not in self._registered_training_updates: self.training_updates.append((key, node)) self._registered_training_updates.add(key)
def register_training_updates(self, *updates): """ Register updates that will only be executed in training phase. """ for key, node in updates: if key not in self._registered_training_updates: self.training_updates.append((key, node)) self._registered_training_updates.add(key)
[ "Register", "updates", "that", "will", "only", "be", "executed", "in", "training", "phase", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/layer.py#L160-L167
[ "def", "register_training_updates", "(", "self", ",", "*", "updates", ")", ":", "for", "key", ",", "node", "in", "updates", ":", "if", "key", "not", "in", "self", ".", "_registered_training_updates", ":", "self", ".", "training_updates", ".", "append", "(", "(", "key", ",", "node", ")", ")", "self", ".", "_registered_training_updates", ".", "add", "(", "key", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
NeuralLayer.register_monitors
Register monitors. They should be tuples of (name, Theano variable).
deepy/layers/layer.py
def register_monitors(self, *monitors): """ Register monitors. They should be tuples of (name, Theano variable). """ for key, node in monitors: if key not in self._registered_monitors: node *= 1.0 # Avoid CudaNdarray self.training_monitors.append((key, node)) self.testing_monitors.append((key, node)) self._registered_monitors.add(key)
def register_monitors(self, *monitors): """ Register monitors. They should be tuples of (name, Theano variable). """ for key, node in monitors: if key not in self._registered_monitors: node *= 1.0 # Avoid CudaNdarray self.training_monitors.append((key, node)) self.testing_monitors.append((key, node)) self._registered_monitors.add(key)
[ "Register", "monitors", "they", "should", "be", "tuple", "of", "name", "and", "Theano", "variable", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/layer.py#L169-L178
[ "def", "register_monitors", "(", "self", ",", "*", "monitors", ")", ":", "for", "key", ",", "node", "in", "monitors", ":", "if", "key", "not", "in", "self", ".", "_registered_monitors", ":", "node", "*=", "1.0", "# Avoid CudaNdarray", "self", ".", "training_monitors", ".", "append", "(", "(", "key", ",", "node", ")", ")", "self", ".", "testing_monitors", ".", "append", "(", "(", "key", ",", "node", ")", ")", "self", ".", "_registered_monitors", ".", "add", "(", "key", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
test
multiple_l2_norm
Get the L2 norm of multiple tensors. This function is taken from blocks.
deepy/trainers/util.py
def multiple_l2_norm(tensors): """ Get the L2 norm of multiple tensors. This function is taken from blocks. """ # Another way for doing this, I don't know which one is fast # return T.sqrt(sum(T.sum(t ** 2) for t in tensors)) flattened = [T.as_tensor_variable(t).flatten() for t in tensors] flattened = [(t if t.ndim > 0 else t.dimshuffle('x')) for t in flattened] joined = T.join(0, *flattened) return T.sqrt(T.sqr(joined).sum())
def multiple_l2_norm(tensors): """ Get the L2 norm of multiple tensors. This function is taken from blocks. """ # Another way for doing this, I don't know which one is fast # return T.sqrt(sum(T.sum(t ** 2) for t in tensors)) flattened = [T.as_tensor_variable(t).flatten() for t in tensors] flattened = [(t if t.ndim > 0 else t.dimshuffle('x')) for t in flattened] joined = T.join(0, *flattened) return T.sqrt(T.sqr(joined).sum())
[ "Get", "the", "L2", "norm", "of", "multiple", "tensors", ".", "This", "function", "is", "taken", "from", "blocks", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/trainers/util.py#L19-L30
[ "def", "multiple_l2_norm", "(", "tensors", ")", ":", "# Another way for doing this, I don't know which one is fast", "# return T.sqrt(sum(T.sum(t ** 2) for t in tensors))", "flattened", "=", "[", "T", ".", "as_tensor_variable", "(", "t", ")", ".", "flatten", "(", ")", "for", "t", "in", "tensors", "]", "flattened", "=", "[", "(", "t", "if", "t", ".", "ndim", ">", "0", "else", "t", ".", "dimshuffle", "(", "'x'", ")", ")", "for", "t", "in", "flattened", "]", "joined", "=", "T", ".", "join", "(", "0", ",", "*", "flattened", ")", "return", "T", ".", "sqrt", "(", "T", ".", "sqr", "(", "joined", ")", ".", "sum", "(", ")", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
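A small check of multiple_l2_norm from the record above: the tensors are flattened, joined, and reduced to one scalar, so the result equals the square root of the summed squares across all inputs. The import path follows the record; the data is arbitrary.

import numpy as np
import theano
import theano.tensor as T
from deepy.trainers.util import multiple_l2_norm  # path taken from the record

a, b = T.vector("a"), T.matrix("b")
norm = theano.function([a, b], multiple_l2_norm([a, b]))
va = np.asarray([3.0, 0.0], dtype=theano.config.floatX)
vb = np.asarray([[0.0, 4.0]], dtype=theano.config.floatX)
print(norm(va, vb))  # ~5.0, i.e. sqrt(3**2 + 4**2)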
test
StreamPickler.dump_one
Dump one element to file_obj, a file opened in write mode.
deepy/utils/stream_pickler.py
def dump_one(elt_to_pickle, file_obj): """ dumps one element to file_obj, a file opened in write mode """ pickled_elt_str = dumps(elt_to_pickle) file_obj.write(pickled_elt_str) # record separator is a blank line # (since pickled_elt_str might contain its own newlines) file_obj.write('\n\n')
def dump_one(elt_to_pickle, file_obj): """ dumps one element to file_obj, a file opened in write mode """ pickled_elt_str = dumps(elt_to_pickle) file_obj.write(pickled_elt_str) # record separator is a blank line # (since pickled_elt_str might contain its own newlines) file_obj.write('\n\n')
[ "dumps", "one", "element", "to", "file_obj", "a", "file", "opened", "in", "write", "mode" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/utils/stream_pickler.py#L25-L33
[ "def", "dump_one", "(", "elt_to_pickle", ",", "file_obj", ")", ":", "pickled_elt_str", "=", "dumps", "(", "elt_to_pickle", ")", "file_obj", ".", "write", "(", "pickled_elt_str", ")", "# record separator is a blank line", "# (since pickled_elt_str might contain its own newlines)", "file_obj", ".", "write", "(", "'\\n\\n'", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
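A usage sketch for dump_one, assuming StreamPickler is importable from deepy.utils.stream_pickler as the record's path suggests and that dump_one is a static method (its signature takes no self). The file is opened in text mode on purpose: under Python 2's default pickle protocol, dumps returns a plain str, and the record separator is a literal blank line.

from deepy.utils.stream_pickler import StreamPickler  # import path assumed from the record

with open("records.pkl", "w") as f:  # hypothetical file name; text mode on purpose
    StreamPickler.dump_one({"step": 1}, f)
    StreamPickler.dump_one({"step": 2}, f)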
test
StreamPickler.load
Load contents from file_obj, returning a generator that yields one element at a time.
deepy/utils/stream_pickler.py
def load(file_obj): """ load contents from file_obj, returning a generator that yields one element at a time """ cur_elt = [] for line in file_obj: cur_elt.append(line) if line == '\n': pickled_elt_str = ''.join(cur_elt) cur_elt = [] try: elt = loads(pickled_elt_str) except ValueError: continue yield elt
def load(file_obj): """ load contents from file_obj, returning a generator that yields one element at a time """ cur_elt = [] for line in file_obj: cur_elt.append(line) if line == '\n': pickled_elt_str = ''.join(cur_elt) cur_elt = [] try: elt = loads(pickled_elt_str) except ValueError: continue yield elt
[ "load", "contents", "from", "file_obj", "returning", "a", "generator", "that", "yields", "one", "element", "at", "a", "time" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/utils/stream_pickler.py#L36-L53
[ "def", "load", "(", "file_obj", ")", ":", "cur_elt", "=", "[", "]", "for", "line", "in", "file_obj", ":", "cur_elt", ".", "append", "(", "line", ")", "if", "line", "==", "'\\n'", ":", "pickled_elt_str", "=", "''", ".", "join", "(", "cur_elt", ")", "cur_elt", "=", "[", "]", "try", ":", "elt", "=", "loads", "(", "pickled_elt_str", ")", "except", "ValueError", ":", "continue", "yield", "elt" ]
090fbad22a08a809b12951cd0d4984f5bd432698
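The matching read side, continuing the sketch above: load yields one unpickled element per blank-line-separated record, so arbitrarily long streams can be consumed lazily.

from deepy.utils.stream_pickler import StreamPickler  # import path assumed from the record

with open("records.pkl") as f:  # file written by the dump_one sketch above
    for elt in StreamPickler.load(f):
        print(elt)  # {'step': 1}, then {'step': 2}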
test
Block.fix
Fix the block and register all the parameters of its sub-layers.
deepy/layers/block.py
def fix(self): """ Fix the block, register all the parameters of sub layers. :return: """ if not self.fixed: for layer in self.layers: if not layer.initialized: raise Exception("All sub layers in a block must be initialized when fixing it.") self.register_inner_layers(layer) self.fixed = True
def fix(self): """ Fix the block, register all the parameters of sub layers. :return: """ if not self.fixed: for layer in self.layers: if not layer.initialized: raise Exception("All sub layers in a block must be initialized when fixing it.") self.register_inner_layers(layer) self.fixed = True
[ "Fix", "the", "block", "register", "all", "the", "parameters", "of", "sub", "layers", ".", ":", "return", ":" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/block.py#L24-L34
[ "def", "fix", "(", "self", ")", ":", "if", "not", "self", ".", "fixed", ":", "for", "layer", "in", "self", ".", "layers", ":", "if", "not", "layer", ".", "initialized", ":", "raise", "Exception", "(", "\"All sub layers in a block must be initialized when fixing it.\"", ")", "self", ".", "register_inner_layers", "(", "layer", ")", "self", ".", "fixed", "=", "True" ]
090fbad22a08a809b12951cd0d4984f5bd432698
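A hedged sketch of the Block lifecycle implied by fix: layers are registered first, then the block is fixed exactly once, which pulls in every sub-layer's parameters. `layer_a` and `layer_b` stand for already-initialized NeuralLayer instances, and any Block constructor arguments are omitted.

from deepy.layers.block import Block  # path taken from the record

block = Block()  # constructor arguments, if any, omitted
block.register_layer(layer_a)  # layer_a/layer_b: initialized NeuralLayers (assumed)
block.register_layer(layer_b)
block.fix()  # registers sub-layer parameters; raises if any layer is uninitialized
block.fix()  # safe: a second call is a no-op once self.fixed is True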
test
Block.register_layer
Register one connected layer. :type layer: NeuralLayer
deepy/layers/block.py
def register_layer(self, layer): """ Register one connected layer. :type layer: NeuralLayer """ if self.fixed: raise Exception("After a block is fixed, no more layers can be registered.") self.layers.append(layer)
def register_layer(self, layer): """ Register one connected layer. :type layer: NeuralLayer """ if self.fixed: raise Exception("After a block is fixed, no more layers can be registered.") self.layers.append(layer)
[ "Register", "one", "connected", "layer", ".", ":", "type", "layer", ":", "NeuralLayer" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/block.py#L51-L58
[ "def", "register_layer", "(", "self", ",", "layer", ")", ":", "if", "self", ".", "fixed", ":", "raise", "Exception", "(", "\"After a block is fixed, no more layers can be registered.\"", ")", "self", ".", "layers", ".", "append", "(", "layer", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
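Continuing the sketch above, register_layer enforces the fixed flag, so late registration fails loudly instead of silently dropping parameters:

try:
    block.register_layer(layer_c)  # hypothetical extra layer, added after fix()
except Exception as e:
    print(e)  # "After a block is fixed, no more layers can be registered."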
test
Block.load_params
Load parameters to the block.
deepy/layers/block.py
def load_params(self, path, exclude_free_params=False): """ Load parameters to the block. """ from deepy.core import graph model = graph.compile(blocks=[self]) model.load_params(path, exclude_free_params=exclude_free_params)
def load_params(self, path, exclude_free_params=False): """ Load parameters to the block. """ from deepy.core import graph model = graph.compile(blocks=[self]) model.load_params(path, exclude_free_params=exclude_free_params)
[ "Load", "parameters", "to", "the", "block", "." ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/block.py#L64-L71
[ "def", "load_params", "(", "self", ",", "path", ",", "exclude_free_params", "=", "False", ")", ":", "from", "deepy", ".", "core", "import", "graph", "from", "deepy", ".", "core", ".", "comp_graph", "import", "ComputationalGraph", "model", "=", "graph", ".", "compile", "(", "blocks", "=", "[", "self", "]", ")", "model", ".", "load_params", "(", "path", ",", "exclude_free_params", "=", "exclude_free_params", ")" ]
090fbad22a08a809b12951cd0d4984f5bd432698
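load_params compiles a throwaway graph around the block and delegates parameter loading to the resulting model. A one-line usage sketch (the file name is hypothetical):

# `block` is assumed to be a fixed Block whose parameters were saved earlier
block.load_params("model.npz", exclude_free_params=True)  # "model.npz" is a placeholder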
test
RecurrentLayer.compute_step
Compute one step in the RNN. :return: one variable for RNN and GRU, multiple variables for LSTM
deepy/layers/recurrent.py
def compute_step(self, state, lstm_cell=None, input=None, additional_inputs=None): """ Compute one step in the RNN. :return: one variable for RNN and GRU, multiple variables for LSTM """ if not self.initialized: input_dim = None if input and hasattr(input.tag, 'last_dim'): input_dim = input.tag.last_dim self.init(input_dim) input_map = self.merge_inputs(input, additional_inputs=additional_inputs) input_map.update({"state": state, "lstm_cell": lstm_cell}) output_map = self.compute_new_state(input_map) outputs = [output_map.pop("state")] outputs += output_map.values() for tensor in outputs: tensor.tag.last_dim = self.hidden_size if len(outputs) == 1: return outputs[0] else: return outputs
def compute_step(self, state, lstm_cell=None, input=None, additional_inputs=None): """ Compute one step in the RNN. :return: one variable for RNN and GRU, multiple variables for LSTM """ if not self.initialized: input_dim = None if input and hasattr(input.tag, 'last_dim'): input_dim = input.tag.last_dim self.init(input_dim) input_map = self.merge_inputs(input, additional_inputs=additional_inputs) input_map.update({"state": state, "lstm_cell": lstm_cell}) output_map = self.compute_new_state(input_map) outputs = [output_map.pop("state")] outputs += output_map.values() for tensor in outputs: tensor.tag.last_dim = self.hidden_size if len(outputs) == 1: return outputs[0] else: return outputs
[ "Compute", "one", "step", "in", "the", "RNN", ".", ":", "return", ":", "one", "variable", "for", "RNN", "and", "GRU", "multiple", "variables", "for", "LSTM" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/recurrent.py#L85-L106
[ "def", "compute_step", "(", "self", ",", "state", ",", "lstm_cell", "=", "None", ",", "input", "=", "None", ",", "additional_inputs", "=", "None", ")", ":", "if", "not", "self", ".", "initialized", ":", "input_dim", "=", "None", "if", "input", "and", "hasattr", "(", "input", ".", "tag", ",", "'last_dim'", ")", ":", "input_dim", "=", "input", ".", "tag", ".", "last_dim", "self", ".", "init", "(", "input_dim", ")", "input_map", "=", "self", ".", "merge_inputs", "(", "input", ",", "additional_inputs", "=", "additional_inputs", ")", "input_map", ".", "update", "(", "{", "\"state\"", ":", "state", ",", "\"lstm_cell\"", ":", "lstm_cell", "}", ")", "output_map", "=", "self", ".", "compute_new_state", "(", "input_map", ")", "outputs", "=", "[", "output_map", ".", "pop", "(", "\"state\"", ")", "]", "outputs", "+=", "output_map", ".", "values", "(", ")", "for", "tensor", "in", "outputs", ":", "tensor", ".", "tag", ".", "last_dim", "=", "self", ".", "hidden_size", "if", "len", "(", "outputs", ")", "==", "1", ":", "return", "outputs", "[", "0", "]", "else", ":", "return", "outputs" ]
090fbad22a08a809b12951cd0d4984f5bd432698
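A hedged sketch of a single recurrence step, using only the signature shown in the record; `rnn`, `prev_state`, `prev_cell`, and `x_t` are assumed symbolic variables of matching shapes.

# `rnn` is assumed to be an initialized RecurrentLayer (plain RNN or GRU variant)
new_state = rnn.compute_step(prev_state, input=x_t)
# an LSTM variant also threads the memory cell and returns multiple variables:
# new_state, new_cell = rnn.compute_step(prev_state, lstm_cell=prev_cell, input=x_t)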
test
RecurrentLayer.get_initial_states
:type input_var: T.var :rtype: dict
deepy/layers/recurrent.py
def get_initial_states(self, input_var, init_state=None): """ :type input_var: T.var :rtype: dict """ initial_states = {} for state in self.state_names: if state != "state" or not init_state: if self._input_type == 'sequence' and input_var.ndim == 2: init_state = T.alloc(np.cast[env.FLOATX](0.), self.hidden_size) else: init_state = T.alloc(np.cast[env.FLOATX](0.), input_var.shape[0], self.hidden_size) initial_states[state] = init_state return initial_states
def get_initial_states(self, input_var, init_state=None): """ :type input_var: T.var :rtype: dict """ initial_states = {} for state in self.state_names: if state != "state" or not init_state: if self._input_type == 'sequence' and input_var.ndim == 2: init_state = T.alloc(np.cast[env.FLOATX](0.), self.hidden_size) else: init_state = T.alloc(np.cast[env.FLOATX](0.), input_var.shape[0], self.hidden_size) initial_states[state] = init_state return initial_states
[ ":", "type", "input_var", ":", "T", ".", "var", ":", "rtype", ":", "dict" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/recurrent.py#L109-L122
[ "def", "get_initial_states", "(", "self", ",", "input_var", ",", "init_state", "=", "None", ")", ":", "initial_states", "=", "{", "}", "for", "state", "in", "self", ".", "state_names", ":", "if", "state", "!=", "\"state\"", "or", "not", "init_state", ":", "if", "self", ".", "_input_type", "==", "'sequence'", "and", "input_var", ".", "ndim", "==", "2", ":", "init_state", "=", "T", ".", "alloc", "(", "np", ".", "cast", "[", "env", ".", "FLOATX", "]", "(", "0.", ")", ",", "self", ".", "hidden_size", ")", "else", ":", "init_state", "=", "T", ".", "alloc", "(", "np", ".", "cast", "[", "env", ".", "FLOATX", "]", "(", "0.", ")", ",", "input_var", ".", "shape", "[", "0", "]", ",", "self", ".", "hidden_size", ")", "initial_states", "[", "state", "]", "=", "init_state", "return", "initial_states" ]
090fbad22a08a809b12951cd0d4984f5bd432698
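A sketch of get_initial_states for batched sequence input: every state name is mapped to a zero tensor of shape (batch, hidden_size), unless an explicit init_state is supplied for the "state" entry. `rnn` is assumed to be an initialized RecurrentLayer.

import theano.tensor as T

x = T.tensor3("x")  # (batch, time, features) sequence input (assumed layout)
states = rnn.get_initial_states(x)  # e.g. {"state": zeros of shape (batch, hidden_size)}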
test
RecurrentLayer.get_step_inputs
:type input_var: T.var :rtype: dict
deepy/layers/recurrent.py
def get_step_inputs(self, input_var, states=None, mask=None, additional_inputs=None): """ :type input_var: T.var :rtype: dict """ step_inputs = {} if self._input_type == "sequence": if not additional_inputs: additional_inputs = [] if mask: step_inputs['mask'] = mask.dimshuffle(1, 0) step_inputs.update(self.merge_inputs(input_var, additional_inputs=additional_inputs)) else: # step_inputs["mask"] = mask.dimshuffle((1,0)) if mask else None if additional_inputs: step_inputs.update(self.merge_inputs(None, additional_inputs=additional_inputs)) if states: for name in self.state_names: step_inputs[name] = states[name] return step_inputs
def get_step_inputs(self, input_var, states=None, mask=None, additional_inputs=None): """ :type input_var: T.var :rtype: dict """ step_inputs = {} if self._input_type == "sequence": if not additional_inputs: additional_inputs = [] if mask: step_inputs['mask'] = mask.dimshuffle(1, 0) step_inputs.update(self.merge_inputs(input_var, additional_inputs=additional_inputs)) else: # step_inputs["mask"] = mask.dimshuffle((1,0)) if mask else None if additional_inputs: step_inputs.update(self.merge_inputs(None, additional_inputs=additional_inputs)) if states: for name in self.state_names: step_inputs[name] = states[name] return step_inputs
[ ":", "type", "input_var", ":", "T", ".", "var", ":", "rtype", ":", "dict" ]
zomux/deepy
python
https://github.com/zomux/deepy/blob/090fbad22a08a809b12951cd0d4984f5bd432698/deepy/layers/recurrent.py#L125-L145
[ "def", "get_step_inputs", "(", "self", ",", "input_var", ",", "states", "=", "None", ",", "mask", "=", "None", ",", "additional_inputs", "=", "None", ")", ":", "step_inputs", "=", "{", "}", "if", "self", ".", "_input_type", "==", "\"sequence\"", ":", "if", "not", "additional_inputs", ":", "additional_inputs", "=", "[", "]", "if", "mask", ":", "step_inputs", "[", "'mask'", "]", "=", "mask", ".", "dimshuffle", "(", "1", ",", "0", ")", "step_inputs", ".", "update", "(", "self", ".", "merge_inputs", "(", "input_var", ",", "additional_inputs", "=", "additional_inputs", ")", ")", "else", ":", "# step_inputs[\"mask\"] = mask.dimshuffle((1,0)) if mask else None", "if", "additional_inputs", ":", "step_inputs", ".", "update", "(", "self", ".", "merge_inputs", "(", "None", ",", "additional_inputs", "=", "additional_inputs", ")", ")", "if", "states", ":", "for", "name", "in", "self", ".", "state_names", ":", "step_inputs", "[", "name", "]", "=", "states", "[", "name", "]", "return", "step_inputs" ]
090fbad22a08a809b12951cd0d4984f5bd432698
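Pairing the two helpers, continuing the sketch above: get_step_inputs bundles the merged inputs, the optional mask, and one entry per named state into the dict that a step computation consumes.

step_inputs = rnn.get_step_inputs(x, states=states)
# step_inputs now holds the merged inputs plus one entry per state name,
# ready to be fed to the layer's step computation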