Python theano.clone() Examples

The following are 15 code examples of theano.clone(), drawn from open-source projects. The source file, project, and license for each example are noted above it. You may also want to check out all available functions/classes of the module theano.
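As a quick orientation before the examples, here is a minimal sketch of the basic call (assuming a working Theano install): theano.clone rebuilds a graph while substituting some of its variables.

import numpy as np
import theano
import theano.tensor as T

x = T.vector('x')
y = T.vector('y')
loss = (x ** 2).sum()

# Rebuild the same expression with y substituted for x.
loss_y = theano.clone(loss, replace={x: y})
f = theano.function([y], loss_y)
f(np.asarray([1.0, 2.0], dtype=theano.config.floatX))  # array(5.0)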
Example #1
Source File: utils.py    From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def deep_clone(output, replace, **kwargs):
    """
    Like theano.clone, but also replaces in the default_update of
    shared variables.
    """
    new_output = list(output)
    default_update_idxs = []
    for idx, v in enumerate(theano.gof.graph.inputs(output)):
        if hasattr(v, "default_update"):
            new_output.append(v.default_update)
            default_update_idxs.append(idx)
    cloned = theano.clone(new_output, replace, **kwargs)
    cloned_output = cloned[:len(output)]
    cloned_default_updates = cloned[len(output):]
    assert len(cloned_default_updates) == len(default_update_idxs)
    cloned_inputs = theano.gof.graph.inputs(cloned_output)
    for idx, update in zip(default_update_idxs, cloned_default_updates):
        v = cloned_inputs[idx]
        assert hasattr(v, "default_update")
        v.default_update = update
    return cloned_output 
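A hypothetical usage sketch of deep_clone (the names below are illustrative, not from the original project): plain theano.clone would replace x only in the output expression, while deep_clone also rewrites the default_update of the shared accumulator.

import numpy as np
import theano
import theano.tensor as T

x = T.vector('x')
acc = theano.shared(np.zeros(3, dtype=theano.config.floatX), name='acc')
acc.default_update = acc + x  # the update expression also references x

out = (acc * x).sum()
x_new = T.vector('x_new')
cloned_out, = deep_clone([out], {x: x_new})
# acc's default_update is now expressed in terms of x_new as well.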
Example #2
Source File: scan_utils.py    From D-VAE with MIT License
def reconstruct_graph(inputs, outputs, tag=None):
    """
    Different interface to clone that allows you to pass inputs.
    Compared to clone, this method always replaces the inputs with
    new variables of the same type and returns those (in the same
    order as the original inputs).

    """
    if tag is None:
        tag = ''
    nw_inputs = [safe_new(x, tag) for x in inputs]
    givens = OrderedDict()
    for nw_x, x in izip(nw_inputs, inputs):
        givens[x] = nw_x
    allinputs = theano.gof.graph.inputs(outputs)
    for inp in allinputs:
        if isinstance(inp, theano.Constant):
            givens[inp] = inp.clone()

    nw_outputs = clone(outputs, replace=givens)
    return (nw_inputs, nw_outputs) 
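A brief usage sketch, assuming reconstruct_graph is imported from theano.scan_module.scan_utils (the module this example comes from):

import theano.tensor as T
from theano.scan_module.scan_utils import reconstruct_graph

x = T.vector('x')
y = (2 * x).sum()

nw_inputs, nw_outputs = reconstruct_graph([x], [y], tag='_copy')
# nw_inputs[0] is a fresh variable of the same type as x, and
# nw_outputs[0] is y rebuilt on top of it.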
Example #3
Source File: test_elemwise.py    From D-VAE with MIT License
def test_gt_grad():
    """A user test that failed.

    Something about it made Elemwise.grad return something that was
    too complicated for get_scalar_constant_value to recognize as being 0, so
    gradient.grad reported that it was not a valid gradient of an
    integer.

    """
    floatX = config.floatX
    T = theano.tensor

    input_ = T.vector(dtype=floatX)
    random_values = numpy.random.RandomState(1234).uniform(
                                                low=-1, high=1, size=(2, 2))
    W_values = numpy.asarray(random_values, dtype=floatX)
    W = theano.shared(value=W_values, name='weights')
    correct_score = T.dot(input_, W)
    wrong_input = T.vector(dtype=floatX)
    wrong_score = theano.clone(correct_score, {input_: wrong_input})

    # Hinge loss
    scores = T.ones_like(correct_score) - correct_score + wrong_score
    cost = (scores * (scores > 0)).sum()
    T.grad(cost, input_) 
Example #4
Source File: builders.py    From D-VAE with MIT License
def infer_shape(self, node, shapes):
        out_shp = theano.scan_module.scan_utils.infer_shape(self.new_outputs,
                                                            self.new_inputs,
                                                            shapes)

        # Clone the output shapes so that they are computed from the outer
        # inputs.
        # Note: this could be done more simply as
        #      ret = [theano.clone(shp, replace=repl) for shp in out_shp]
        # but cloning each shape separately could duplicate common subgraphs
        # between the shape calls. The Theano optimizer would clean this up
        # later, but that asks extra work of the optimizer.
        repl = dict(zip(self.new_inputs, node.inputs))
        cloned = theano.clone(reduce(tuple.__add__, out_shp), replace=repl)
        ret = []
        used = 0
        for i in range(len(out_shp)):
            nb = len(out_shp[i])
            ret.append(cloned[used: used + nb])
            used += nb

        return ret 
Example #5
Source File: servoing_policy_network.py    From visual_dynamics with MIT License
def __init__(self, incoming, servoing_pol, **kwargs):

        assert isinstance(servoing_pol, TheanoServoingPolicy)
        super(TheanoServoingPolicyLayer, self).__init__(incoming, **kwargs)

        assert len(self.input_shape) == 4 and self.input_shape[1] == 6
        self.action_space = servoing_pol.action_space

        self.sqrt_w_var = self.add_param(np.sqrt(servoing_pol.w).astype(theano.config.floatX), servoing_pol.w.shape, name='sqrt_w')
        self.sqrt_lambda_var = self.add_param(np.sqrt(servoing_pol.lambda_).astype(theano.config.floatX), servoing_pol.lambda_.shape, name='sqrt_lambda')
        self.w_var = self.sqrt_w_var ** 2
        self.lambda_var = self.sqrt_lambda_var ** 2

        self.X_var, U_var, self.X_target_var, self.U_lin_var, alpha_var = servoing_pol.input_vars
        w_var, lambda_var = servoing_pol.param_vars
        pi_var = servoing_pol._get_pi_var()
        self.pi_var = theano.clone(pi_var, replace={w_var: self.w_var,
                                                    lambda_var: self.lambda_var,
                                                    alpha_var: np.array(servoing_pol.alpha, dtype=theano.config.floatX)}) 
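This layer replaces the policy's parameter variables with derived expressions (the squares of the new sqrt parameters) rather than with plain variables. A minimal sketch of that reparameterization pattern, with illustrative names:

import theano
import theano.tensor as T

w = T.vector('w')
cost = (w * 2).sum()

sqrt_w = T.vector('sqrt_w')
# The replacement can be an arbitrary expression, not just a variable:
# here w is reparameterized as sqrt_w ** 2.
cost_reparam = theano.clone(cost, replace={w: sqrt_w ** 2})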
Example #6
Source File: servoing_policy.py    From visual_dynamics with MIT License
def _get_jac_vars(self):
        if not self.predictor.feature_jacobian_name:
            raise NotImplementedError

        X_var, U_var, X_target_var, U_lin_var, alpha_var = self.input_vars

        names = [self.predictor.feature_name, self.predictor.feature_jacobian_name, self.predictor.next_feature_name]
        vars_ = L.get_output([self.predictor.pred_layers[name] for name in iter_util.flatten_tree(names)], deterministic=True)
        feature_vars, jac_vars, next_feature_vars = iter_util.unflatten_tree(names, vars_)

        y_vars = [T.flatten(feature_var, outdim=2) for feature_var in feature_vars]
        y_target_vars = [theano.clone(y_var, replace={X_var: X_target_var}) for y_var in y_vars]
        y_target_vars = [theano.ifelse.ifelse(T.eq(alpha_var, 1.0),
                                              y_target_var,
                                              alpha_var * y_target_var + (1 - alpha_var) * y_var)
                         for (y_var, y_target_var) in zip(y_vars, y_target_vars)]

        jac_vars = [theano.clone(jac_var, replace={U_var: U_lin_var}) for jac_var in jac_vars]
        return jac_vars 
Example #7
Source File: servoing_policy.py    From visual_dynamics with MIT License
def _get_jac_z_vars(self):
        if not self.predictor.feature_jacobian_name:
            raise NotImplementedError

        X_var, U_var, X_target_var, U_lin_var, alpha_var = self.input_vars

        names = [self.predictor.feature_name, self.predictor.feature_jacobian_name, self.predictor.next_feature_name]
        vars_ = L.get_output([self.predictor.pred_layers[name] for name in iter_util.flatten_tree(names)], deterministic=True)
        feature_vars, jac_vars, next_feature_vars = iter_util.unflatten_tree(names, vars_)

        y_vars = [T.flatten(feature_var, outdim=2) for feature_var in feature_vars]
        y_target_vars = [theano.clone(y_var, replace={X_var: X_target_var}) for y_var in y_vars]
        y_target_vars = [theano.ifelse.ifelse(T.eq(alpha_var, 1.0),
                                              y_target_var,
                                              alpha_var * y_target_var + (1 - alpha_var) * y_var)
                         for (y_var, y_target_var) in zip(y_vars, y_target_vars)]

        jac_vars = [theano.clone(jac_var, replace={U_var: U_lin_var}) for jac_var in jac_vars]
        y_next_pred_vars = [T.flatten(next_feature_var, outdim=2) for next_feature_var in next_feature_vars]
        y_next_pred_vars = [theano.clone(y_next_pred_var, replace={U_var: U_lin_var}) for y_next_pred_var in y_next_pred_vars]

        z_vars = [y_target_var - y_next_pred_var + T.batched_tensordot(jac_var, U_lin_var, axes=(2, 1))
                  for (y_target_var, y_next_pred_var, jac_var) in zip(y_target_vars, y_next_pred_vars, jac_vars)]
        return jac_vars, z_vars 
Example #8
Source File: REINFORCE.py    From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def compute_output(self, network, mu_vw, sigma_vw):
        deterministic = network.find_hyperparameter(["deterministic"], False)
        if deterministic:
            res = mu_vw.variable
        else:
            # TODO look at shape of both mu and sigma
            shape = mu_vw.shape
            if any(s is None for s in shape):
                # NOTE: this uses symbolic shape - can be an issue with
                # theano.clone and random numbers
                # https://groups.google.com/forum/#!topic/theano-users/P7Mv7Fg0kUs
                warnings.warn("using symbolic shape for random number shape, "
                              "which can be an issue with theano.clone")
                shape = mu_vw.variable.shape
            # TODO save this state so that we can seed the rng
            srng = MRG_RandomStreams()
            res = srng.normal(shape,
                              avg=mu_vw.variable,
                              std=sigma_vw.variable,
                              dtype=fX)
        network.create_vw(
            "default",
            variable=theano.gradient.disconnected_grad(res),
            shape=mu_vw.shape,
            tags={"output"},
        ) 
Example #9
Source File: stochastic.py    From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def compute_output(self, network, in_vw):
        deterministic = network.find_hyperparameter(["deterministic"])
        p = network.find_hyperparameter(["dropout_probability",
                                         "probability",
                                         "p"],
                                        0)
        if deterministic or p == 0:
            network.copy_vw(
                name="default",
                previous_vw=in_vw,
                tags={"output"},
            )
        else:
            rescale_factor = 1 / (1 - p)
            mask_shape = in_vw.shape
            if any(s is None for s in mask_shape):
                # NOTE: this uses symbolic shape - can be an issue with
                # theano.clone and random numbers
                # https://groups.google.com/forum/#!topic/theano-users/P7Mv7Fg0kUs
                warnings.warn("using symbolic shape for dropout mask, "
                              "which can be an issue with theano.clone")
                mask_shape = in_vw.symbolic_shape()
            # FIXME generalize to other shape dimensions.
            # assume this is of the form bc01 (batch, channel, width, height)
            mask_shape = mask_shape[:2]
            # TODO save this state so that we can seed the rng
            srng = MRG_RandomStreams()
            # the bernoulli probability is the complement of the dropout
            # probability, because a mask value of 1 means the unit is kept
            bernoulli_prob = 1 - p
            mask = rescale_factor * srng.binomial(mask_shape,
                                                  p=bernoulli_prob,
                                                  dtype=fX)
            mask = mask.dimshuffle(0, 1, 'x', 'x')
            network.create_vw(
                "default",
                variable=in_vw.variable * mask,
                shape=in_vw.shape,
                tags={"output"},
            ) 
Example #10
Source File: stochastic.py    From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def compute_output(self, network, in_vw):
        deterministic = network.find_hyperparameter(["deterministic"])
        p = network.find_hyperparameter(["dropout_probability",
                                         "probability",
                                         "p"],
                                        0)
        if deterministic or p == 0:
            network.copy_vw(
                name="default",
                previous_vw=in_vw,
                tags={"output"},
            )
        else:
            rescale_factor = 1 / (1 - p)
            mask_shape = in_vw.shape
            if any(s is None for s in mask_shape):
                # NOTE: this uses symbolic shape - can be an issue with
                # theano.clone and random numbers
                # https://groups.google.com/forum/#!topic/theano-users/P7Mv7Fg0kUs
                warnings.warn("using symbolic shape for dropout mask, "
                              "which can be an issue with theano.clone")
                mask_shape = in_vw.variable.shape
            # TODO save this state so that we can seed the rng
            srng = MRG_RandomStreams()
            # the bernoulli probability is the complement of the dropout
            # probability, because a mask value of 1 means the unit is kept
            bernoulli_prob = 1 - p
            mask = rescale_factor * srng.binomial(mask_shape,
                                                  p=bernoulli_prob,
                                                  dtype=fX)
            network.create_vw(
                "default",
                variable=in_vw.variable * mask,
                shape=in_vw.shape,
                tags={"output"},
            ) 
Example #11
Source File: stochastic.py    From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def compute_output(self, network, in_vw):
        deterministic = network.find_hyperparameter(["deterministic"])
        sigma = network.find_hyperparameter(["sigma"], None)
        if sigma is None:
            p = network.find_hyperparameter(["dropout_probability",
                                             "probability",
                                             "p"],
                                            0)
            if p == 0:
                sigma = 0
            else:
                # derive the gaussian noise std from the bernoulli dropout
                # probability: a bernoulli mask rescaled by 1 / (1 - p) has
                # mean 1 and variance p / (1 - p)
                sigma = T.sqrt(p / (1 - p))
        if deterministic or sigma == 0:
            network.copy_vw(
                name="default",
                previous_vw=in_vw,
                tags={"output"},
            )
        else:
            mask_shape = in_vw.shape
            if any(s is None for s in mask_shape):
                # NOTE: this uses symbolic shape - can be an issue with
                # theano.clone and random numbers
                # https://groups.google.com/forum/#!topic/theano-users/P7Mv7Fg0kUs
                warnings.warn("using symbolic shape for dropout mask, "
                              "which can be an issue with theano.clone")
                mask_shape = in_vw.variable.shape
            # TODO save this state so that we can seed the rng
            srng = MRG_RandomStreams()
            mask = srng.normal(mask_shape, avg=1.0, std=sigma, dtype=fX)
            network.create_vw(
                "default",
                variable=in_vw.variable * mask,
                shape=in_vw.shape,
                tags={"output"},
            ) 
Example #12
Source File: batch_norms.py    From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def get_output_for(self, input, deterministic=False, **kwargs):
        beta = self.beta
        if not deterministic:
            # Clone beta (the clone shares beta's storage) so an update can
            # be attached without touching the parameter variable itself.
            self_beta = theano.clone(self.beta, share_inputs=False)
            input_beta = ttt.percentile(input, self.perc)
            # Exponential moving average of the input percentile.
            self_beta.default_update = ((1 - self.alpha) * self_beta +
                                        self.alpha * input_beta)
            # Adding 0 * self_beta leaves the value unchanged but pulls the
            # clone (and its default_update) into the compiled graph.
            beta += 0 * self_beta

        # thresholding
        return theano.tensor.nnet.sigmoid(self.tight * (input - beta + self.bias))
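The clone-plus-default_update idiom above is the same trick Lasagne-style batch normalization uses to fold a running-statistic update into the training-time graph. A standalone sketch with hypothetical names (not code from the original project):

import numpy as np
import theano
import theano.tensor as T

floatX = theano.config.floatX
beta = theano.shared(np.asarray(0.0, dtype=floatX), name='beta')
x = T.vector('x')
alpha = np.asarray(0.1, dtype=floatX)

# The clone shares beta's storage, so applying its update also moves beta.
beta_clone = theano.clone(beta, share_inputs=False)
beta_clone.default_update = (1 - alpha) * beta_clone + alpha * x.mean()

# 0 * beta_clone keeps the output value unchanged while pulling the clone
# (and hence its default_update) into any function compiled from it.
out = T.nnet.sigmoid(x - (beta + 0 * beta_clone))
f = theano.function([x], out)  # each call also updates the running beta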
Example #13
Source File: batch_norms.py    From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def get_output_for(self, input, deterministic=False, **kwargs):
        beta = self.beta
        if not deterministic:
            # Same clone + default_update idiom as Example #12: keep an
            # exponential moving average of the input percentile in beta.
            self_beta = theano.clone(self.beta, share_inputs=False)
            input_beta = ttt.percentile(input, self.perc)
            self_beta.default_update = ((1 - self.alpha) * self_beta +
                                        self.alpha * input_beta)
            beta += 0 * self_beta

        # thresholding
        return theano.tensor.nnet.relu(input - beta, 0.0)
Example #14
Source File: utils_test.py    From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def test_clone():
    """
    NOTE: if this test eventually passes (e.g. theano fixes the issue),
    deep_clone may no longer be necessary
    """
    _clone_test_case(theano.clone) 
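_clone_test_case itself is not part of this excerpt; the following is only a hypothetical sketch of the kind of property such a test case might check (all names are illustrative).

import numpy as np
import theano
import theano.tensor as T

def _clone_test_case_sketch(clone_fn):
    x = theano.shared(np.float32(2.0), name='x')
    s = theano.shared(np.float32(0.0), name='s')
    s.default_update = s + x  # the update expression references x
    y = theano.shared(np.float32(5.0), name='y')

    clone_fn([s + x], {x: y})

    # A "deep" clone should rewrite s's default_update in terms of y;
    # theano.clone leaves it referencing x, which is why this test fails
    # for theano.clone and why deep_clone (Example #1) exists.
    assert y in theano.gof.graph.inputs([s.default_update])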
Example #15
Source File: dNDF.py    From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def compute_output(self, network, in_vw):
        axis = network.find_hyperparameter(["axis"])
        deterministic = network.find_hyperparameter(["deterministic"], False)

        # calculate output shape
        output_shape = list(in_vw.shape)
        output_shape.pop(axis)

        if deterministic:
            out_var = in_vw.variable.mean(axis=axis)
        else:
            # TODO save this state so that we can seed the rng
            srng = MRG_RandomStreams()
            if in_vw.shape[axis] is None:
                # NOTE: this uses symbolic shape - can be an issue with
                # theano.clone and random numbers
                # https://groups.google.com/forum/#!topic/theano-users/P7Mv7Fg0kUs
                warnings.warn("using symbolic shape for random variable size "
                              "which can be an issue with theano.clone")
            idx = T.argmax(srng.normal([in_vw.symbolic_shape()[axis]]))
            slices = tuple([slice(None) for _ in range(axis)] + [idx])
            out_var = in_vw.variable[slices]

        network.create_vw(
            "default",
            variable=out_var,
            shape=tuple(output_shape),
            tags={"output"},
        ) 