Python cntk.sqrt() Examples

The following are 30 code examples of cntk.sqrt(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module cntk, or try the search function.
Example #1
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 6 votes vote down vote up
def batch_normalization(x, mean, var, beta, gamma, epsilon=1e-3):
    """Batch-normalize ``x``: ``(x - mean) / sqrt(var + epsilon) * gamma + beta``.

    The mean / var / beta / gamma may have been produced by broadcasting,
    leaving an extra length-1 batch axis that CNTK does not expect; any
    such dummy axis is stripped first.

    Fix: the original computed ``C.sqrt(var) + epsilon``, adding epsilon
    *outside* the square root.  The standard batch-normalization formula
    (and the sibling implementations in this file) add epsilon *inside*:
    ``sqrt(var + epsilon)``, which also keeps the sqrt away from zero.
    """
    if ndim(mean) == ndim(x) and shape(mean)[0] == 1:
        mean = _reshape_dummy_dim(mean, [0])
    if ndim(var) == ndim(x) and shape(var)[0] == 1:
        var = _reshape_dummy_dim(var, [0])

    if gamma is None:
        gamma = ones_like(var)  # default scale: 1
    elif ndim(gamma) == ndim(x) and shape(gamma)[0] == 1:
        gamma = _reshape_dummy_dim(gamma, [0])

    if beta is None:
        beta = zeros_like(mean)  # default shift: 0
    elif ndim(beta) == ndim(x) and shape(beta)[0] == 1:
        beta = _reshape_dummy_dim(beta, [0])

    # epsilon belongs under the square root: sqrt(var + epsilon)
    return (x - mean) / C.sqrt(var + epsilon) * gamma + beta
Example #2
Source File: cntk_backend.py    From keras-lambda with MIT License 6 votes vote down vote up
def batch_normalization(x, mean, var, beta, gamma, epsilon=1e-3):
    """Batch-normalize ``x``: ``gamma * (x - mean) / sqrt(var + epsilon) + beta``.

    Statistics that went through broadcasting can carry a dummy length-1
    batch axis which CNTK does not want; it is stripped here first.
    """
    def _strip_dummy(t):
        # Remove the leading length-1 axis when the tensor carries one.
        if ndim(t) == ndim(x) and shape(t)[0] == 1:
            return _reshape_dummy_dim(t, [0])
        return t

    mean = _strip_dummy(mean)
    var = _strip_dummy(var)
    gamma = ones_like(var) if gamma is None else _strip_dummy(gamma)
    beta = zeros_like(mean) if beta is None else _strip_dummy(beta)

    return gamma * ((x - mean) / C.sqrt(var + epsilon)) + beta
Example #3
Source File: cntk_backend.py    From GraphicDesignPatternByPython with MIT License 6 votes vote down vote up
def batch_normalization(x, mean, var, beta, gamma, axis=-1, epsilon=1e-3):
    """Batch-normalize ``x``: ``(x - mean) / sqrt(var + epsilon) * gamma + beta``.

    ``axis`` is accepted for API compatibility but is not used here.
    Statistics that went through broadcasting can carry a dummy length-1
    batch axis which CNTK does not want; it is stripped here first.
    """
    def _strip_dummy(t):
        # Remove the leading length-1 axis when the tensor carries one.
        if ndim(t) == ndim(x) and shape(t)[0] == 1:
            return _reshape_dummy_dim(t, [0])
        return t

    mean = _strip_dummy(mean)
    var = _strip_dummy(var)
    gamma = ones_like(var) if gamma is None else _strip_dummy(gamma)
    beta = zeros_like(mean) if beta is None else _strip_dummy(beta)

    return (x - mean) / C.sqrt(var + epsilon) * gamma + beta
Example #4
Source File: cntk_emitter.py    From MMdnn with MIT License 6 votes vote down vote up
def _layer_BatchNorm(self):
        # Emit a batch_normalization helper into the generated CNTK script.
        # The triple-quoted string below is written verbatim into the output
        # file, so it must stay byte-for-byte intact.  The generated helper
        # reads mean/var (and optional scale/bias) from __weights_dict and
        # applies (input - mean) / sqrt(var + epsilon).
        self.add_body(0, """
def batch_normalization(input, name, epsilon, **kwargs):
    mean = cntk.Parameter(init = __weights_dict[name]['mean'],
        name = name + "_mean")
    var = cntk.Parameter(init = __weights_dict[name]['var'],
        name = name + "_var")

    layer = (input - mean) / cntk.sqrt(var + epsilon)
    if 'scale' in __weights_dict[name]:
        scale = cntk.Parameter(init = __weights_dict[name]['scale'],
            name = name + "_scale")
        layer = scale * layer

    if 'bias' in __weights_dict[name]:
        bias = cntk.Parameter(init = __weights_dict[name]['bias'],
            name = name + "_bias")
        layer = layer + bias

    return layer
""") 
Example #5
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 6 votes vote down vote up
def batch_normalization(x, mean, var, beta, gamma, epsilon=1e-3):
    """Batch-normalize ``x``: ``(x - mean) / sqrt(var + epsilon) * gamma + beta``.

    The mean / var / beta / gamma may have been produced by broadcasting,
    leaving an extra length-1 batch axis that CNTK does not expect; any
    such dummy axis is stripped first.

    Fix: the original computed ``C.sqrt(var) + epsilon``, adding epsilon
    *outside* the square root.  The standard batch-normalization formula
    (and the sibling implementations in this file) add epsilon *inside*:
    ``sqrt(var + epsilon)``, which also keeps the sqrt away from zero.
    """
    if ndim(mean) == ndim(x) and shape(mean)[0] == 1:
        mean = _reshape_dummy_dim(mean, [0])
    if ndim(var) == ndim(x) and shape(var)[0] == 1:
        var = _reshape_dummy_dim(var, [0])

    if gamma is None:
        gamma = ones_like(var)  # default scale: 1
    elif ndim(gamma) == ndim(x) and shape(gamma)[0] == 1:
        gamma = _reshape_dummy_dim(gamma, [0])

    if beta is None:
        beta = zeros_like(mean)  # default shift: 0
    elif ndim(beta) == ndim(x) and shape(beta)[0] == 1:
        beta = _reshape_dummy_dim(beta, [0])

    # epsilon belongs under the square root: sqrt(var + epsilon)
    return (x - mean) / C.sqrt(var + epsilon) * gamma + beta
Example #6
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 6 votes vote down vote up
def batch_normalization(x, mean, var, beta, gamma, epsilon=1e-3):
    """Batch-normalize ``x``: ``(x - mean) / sqrt(var + epsilon) * gamma + beta``.

    The mean / var / beta / gamma may have been produced by broadcasting,
    leaving an extra length-1 batch axis that CNTK does not expect; any
    such dummy axis is stripped first.

    Fix: the original computed ``C.sqrt(var) + epsilon``, adding epsilon
    *outside* the square root.  The standard batch-normalization formula
    (and the sibling implementations in this file) add epsilon *inside*:
    ``sqrt(var + epsilon)``, which also keeps the sqrt away from zero.
    """
    if ndim(mean) == ndim(x) and shape(mean)[0] == 1:
        mean = _reshape_dummy_dim(mean, [0])
    if ndim(var) == ndim(x) and shape(var)[0] == 1:
        var = _reshape_dummy_dim(var, [0])

    if gamma is None:
        gamma = ones_like(var)  # default scale: 1
    elif ndim(gamma) == ndim(x) and shape(gamma)[0] == 1:
        gamma = _reshape_dummy_dim(gamma, [0])

    if beta is None:
        beta = zeros_like(mean)  # default shift: 0
    elif ndim(beta) == ndim(x) and shape(beta)[0] == 1:
        beta = _reshape_dummy_dim(beta, [0])

    # epsilon belongs under the square root: sqrt(var + epsilon)
    return (x - mean) / C.sqrt(var + epsilon) * gamma + beta
Example #7
Source File: cntk_backend.py    From deepQuest with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def batch_normalization(x, mean, var, beta, gamma, epsilon=1e-3):
    """Batch-normalize ``x``: ``gamma * (x - mean) / sqrt(var + epsilon) + beta``.

    Statistics that went through broadcasting can carry a dummy length-1
    batch axis which CNTK does not want; it is stripped here first.
    """
    def _strip_dummy(t):
        # Remove the leading length-1 axis when the tensor carries one.
        if ndim(t) == ndim(x) and shape(t)[0] == 1:
            return _reshape_dummy_dim(t, [0])
        return t

    mean = _strip_dummy(mean)
    var = _strip_dummy(var)
    gamma = ones_like(var) if gamma is None else _strip_dummy(gamma)
    beta = zeros_like(mean) if beta is None else _strip_dummy(beta)

    return gamma * ((x - mean) / C.sqrt(var + epsilon)) + beta
Example #8
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 6 votes vote down vote up
def batch_normalization(x, mean, var, beta, gamma, epsilon=1e-3):
    """Batch-normalize ``x``: ``(x - mean) / sqrt(var + epsilon) * gamma + beta``.

    The mean / var / beta / gamma may have been produced by broadcasting,
    leaving an extra length-1 batch axis that CNTK does not expect; any
    such dummy axis is stripped first.

    Fix: the original computed ``C.sqrt(var) + epsilon``, adding epsilon
    *outside* the square root.  The standard batch-normalization formula
    (and the sibling implementations in this file) add epsilon *inside*:
    ``sqrt(var + epsilon)``, which also keeps the sqrt away from zero.
    """
    if ndim(mean) == ndim(x) and shape(mean)[0] == 1:
        mean = _reshape_dummy_dim(mean, [0])
    if ndim(var) == ndim(x) and shape(var)[0] == 1:
        var = _reshape_dummy_dim(var, [0])

    if gamma is None:
        gamma = ones_like(var)  # default scale: 1
    elif ndim(gamma) == ndim(x) and shape(gamma)[0] == 1:
        gamma = _reshape_dummy_dim(gamma, [0])

    if beta is None:
        beta = zeros_like(mean)  # default shift: 0
    elif ndim(beta) == ndim(x) and shape(beta)[0] == 1:
        beta = _reshape_dummy_dim(beta, [0])

    # epsilon belongs under the square root: sqrt(var + epsilon)
    return (x - mean) / C.sqrt(var + epsilon) * gamma + beta
Example #9
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 6 votes vote down vote up
def batch_normalization(x, mean, var, beta, gamma, epsilon=1e-3):
    """Batch-normalize ``x``: ``(x - mean) / sqrt(var + epsilon) * gamma + beta``.

    The mean / var / beta / gamma may have been produced by broadcasting,
    leaving an extra length-1 batch axis that CNTK does not expect; any
    such dummy axis is stripped first.

    Fix: the original computed ``C.sqrt(var) + epsilon``, adding epsilon
    *outside* the square root.  The standard batch-normalization formula
    (and the sibling implementations in this file) add epsilon *inside*:
    ``sqrt(var + epsilon)``, which also keeps the sqrt away from zero.
    """
    if ndim(mean) == ndim(x) and shape(mean)[0] == 1:
        mean = _reshape_dummy_dim(mean, [0])
    if ndim(var) == ndim(x) and shape(var)[0] == 1:
        var = _reshape_dummy_dim(var, [0])

    if gamma is None:
        gamma = ones_like(var)  # default scale: 1
    elif ndim(gamma) == ndim(x) and shape(gamma)[0] == 1:
        gamma = _reshape_dummy_dim(gamma, [0])

    if beta is None:
        beta = zeros_like(mean)  # default shift: 0
    elif ndim(beta) == ndim(x) and shape(beta)[0] == 1:
        beta = _reshape_dummy_dim(beta, [0])

    # epsilon belongs under the square root: sqrt(var + epsilon)
    return (x - mean) / C.sqrt(var + epsilon) * gamma + beta
Example #10
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 6 votes vote down vote up
def batch_normalization(x, mean, var, beta, gamma, epsilon=1e-3):
    """Batch-normalize ``x``: ``(x - mean) / sqrt(var + epsilon) * gamma + beta``.

    The mean / var / beta / gamma may have been produced by broadcasting,
    leaving an extra length-1 batch axis that CNTK does not expect; any
    such dummy axis is stripped first.

    Fix: the original computed ``C.sqrt(var) + epsilon``, adding epsilon
    *outside* the square root.  The standard batch-normalization formula
    (and the sibling implementations in this file) add epsilon *inside*:
    ``sqrt(var + epsilon)``, which also keeps the sqrt away from zero.
    """
    if ndim(mean) == ndim(x) and shape(mean)[0] == 1:
        mean = _reshape_dummy_dim(mean, [0])
    if ndim(var) == ndim(x) and shape(var)[0] == 1:
        var = _reshape_dummy_dim(var, [0])

    if gamma is None:
        gamma = ones_like(var)  # default scale: 1
    elif ndim(gamma) == ndim(x) and shape(gamma)[0] == 1:
        gamma = _reshape_dummy_dim(gamma, [0])

    if beta is None:
        beta = zeros_like(mean)  # default shift: 0
    elif ndim(beta) == ndim(x) and shape(beta)[0] == 1:
        beta = _reshape_dummy_dim(beta, [0])

    # epsilon belongs under the square root: sqrt(var + epsilon)
    return (x - mean) / C.sqrt(var + epsilon) * gamma + beta
Example #11
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 6 votes vote down vote up
def batch_normalization(x, mean, var, beta, gamma, epsilon=1e-3):
    """Batch-normalize ``x``: ``(x - mean) / sqrt(var + epsilon) * gamma + beta``.

    The mean / var / beta / gamma may have been produced by broadcasting,
    leaving an extra length-1 batch axis that CNTK does not expect; any
    such dummy axis is stripped first.

    Fix: the original computed ``C.sqrt(var) + epsilon``, adding epsilon
    *outside* the square root.  The standard batch-normalization formula
    (and the sibling implementations in this file) add epsilon *inside*:
    ``sqrt(var + epsilon)``, which also keeps the sqrt away from zero.
    """
    if ndim(mean) == ndim(x) and shape(mean)[0] == 1:
        mean = _reshape_dummy_dim(mean, [0])
    if ndim(var) == ndim(x) and shape(var)[0] == 1:
        var = _reshape_dummy_dim(var, [0])

    if gamma is None:
        gamma = ones_like(var)  # default scale: 1
    elif ndim(gamma) == ndim(x) and shape(gamma)[0] == 1:
        gamma = _reshape_dummy_dim(gamma, [0])

    if beta is None:
        beta = zeros_like(mean)  # default shift: 0
    elif ndim(beta) == ndim(x) and shape(beta)[0] == 1:
        beta = _reshape_dummy_dim(beta, [0])

    # epsilon belongs under the square root: sqrt(var + epsilon)
    return (x - mean) / C.sqrt(var + epsilon) * gamma + beta
Example #12
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 6 votes vote down vote up
def batch_normalization(x, mean, var, beta, gamma, epsilon=1e-3):
    """Batch-normalize ``x``: ``(x - mean) / sqrt(var + epsilon) * gamma + beta``.

    The mean / var / beta / gamma may have been produced by broadcasting,
    leaving an extra length-1 batch axis that CNTK does not expect; any
    such dummy axis is stripped first.

    Fix: the original computed ``C.sqrt(var) + epsilon``, adding epsilon
    *outside* the square root.  The standard batch-normalization formula
    (and the sibling implementations in this file) add epsilon *inside*:
    ``sqrt(var + epsilon)``, which also keeps the sqrt away from zero.
    """
    if ndim(mean) == ndim(x) and shape(mean)[0] == 1:
        mean = _reshape_dummy_dim(mean, [0])
    if ndim(var) == ndim(x) and shape(var)[0] == 1:
        var = _reshape_dummy_dim(var, [0])

    if gamma is None:
        gamma = ones_like(var)  # default scale: 1
    elif ndim(gamma) == ndim(x) and shape(gamma)[0] == 1:
        gamma = _reshape_dummy_dim(gamma, [0])

    if beta is None:
        beta = zeros_like(mean)  # default shift: 0
    elif ndim(beta) == ndim(x) and shape(beta)[0] == 1:
        beta = _reshape_dummy_dim(beta, [0])

    # epsilon belongs under the square root: sqrt(var + epsilon)
    return (x - mean) / C.sqrt(var + epsilon) * gamma + beta
Example #13
Source File: cntk_backend.py    From deepQuest with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def std(x, axis=None, keepdims=False):
    """Standard deviation of ``x`` along ``axis``: the square root of the variance."""
    variance = var(x, axis=axis, keepdims=keepdims)
    return C.sqrt(variance)
Example #14
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 5 votes vote down vote up
def std(x, axis=None, keepdims=False):
    """Standard deviation of ``x`` along ``axis``: the square root of the variance."""
    variance = var(x, axis=axis, keepdims=keepdims)
    return C.sqrt(variance)
Example #15
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 5 votes vote down vote up
def sqrt(x):
    """Element-wise square root of ``x``, delegated to CNTK."""
    return C.sqrt(x)
Example #16
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 5 votes vote down vote up
def l2_normalize(x, axis=None):
    """Scale ``x`` so that its L2 norm along ``axis`` equals 1."""
    axes = _normalize_axis([axis], x)
    l2_norm = C.sqrt(C.reduce_sum(C.square(x), axis=axes[0]))
    return x / l2_norm
Example #17
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 5 votes vote down vote up
def std(x, axis=None, keepdims=False):
    """Standard deviation of ``x`` along ``axis``: the square root of the variance."""
    variance = var(x, axis=axis, keepdims=keepdims)
    return C.sqrt(variance)
Example #18
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 5 votes vote down vote up
def l2_normalize(x, axis=None):
    """Scale ``x`` so that its L2 norm along ``axis`` equals 1."""
    axes = _normalize_axis([axis], x)
    l2_norm = C.sqrt(C.reduce_sum(C.square(x), axis=axes[0]))
    return x / l2_norm
Example #19
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 5 votes vote down vote up
def std(x, axis=None, keepdims=False):
    """Standard deviation of ``x`` along ``axis``: the square root of the variance."""
    variance = var(x, axis=axis, keepdims=keepdims)
    return C.sqrt(variance)
Example #20
Source File: cntk_backend.py    From deepQuest with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def sqrt(x):
    """Element-wise square root of ``x``, delegated to CNTK."""
    return C.sqrt(x)
Example #21
Source File: cntk_backend.py    From deepQuest with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def l2_normalize(x, axis=None):
    """Scale ``x`` so that its L2 norm along ``axis`` equals 1."""
    axes = _normalize_axis([axis], x)
    l2_norm = C.sqrt(C.reduce_sum(C.square(x), axis=axes[0]))
    return x / l2_norm
Example #22
Source File: cntk_backend.py    From keras-lambda with MIT License 5 votes vote down vote up
def std(x, axis=None, keepdims=False):
    """Standard deviation of ``x`` along ``axis``: the square root of the variance."""
    variance = var(x, axis=axis, keepdims=keepdims)
    return C.sqrt(variance)
Example #23
Source File: cntk_backend.py    From keras-lambda with MIT License 5 votes vote down vote up
def sqrt(x):
    """Element-wise square root of ``x``, delegated to CNTK."""
    return C.sqrt(x)
Example #24
Source File: cntk_backend.py    From keras-lambda with MIT License 5 votes vote down vote up
def l2_normalize(x, axis):
    """Scale ``x`` so that its L2 norm along ``axis`` equals 1."""
    axes = _normalize_axis([axis], x)
    l2_norm = C.sqrt(C.reduce_sum(C.square(x), axis=axes[0]))
    return x / l2_norm
Example #25
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 5 votes vote down vote up
def std(x, axis=None, keepdims=False):
    """Standard deviation of ``x`` along ``axis``: the square root of the variance."""
    variance = var(x, axis=axis, keepdims=keepdims)
    return C.sqrt(variance)
Example #26
Source File: cntk_backend.py    From GraphicDesignPatternByPython with MIT License 5 votes vote down vote up
def std(x, axis=None, keepdims=False):
    """Standard deviation of ``x`` along ``axis``: the square root of the variance."""
    variance = var(x, axis=axis, keepdims=keepdims)
    return C.sqrt(variance)
Example #27
Source File: cntk_backend.py    From GraphicDesignPatternByPython with MIT License 5 votes vote down vote up
def sqrt(x):
    """Element-wise square root of ``x``, delegated to CNTK."""
    return C.sqrt(x)
Example #28
Source File: cntk_backend.py    From GraphicDesignPatternByPython with MIT License 5 votes vote down vote up
def l2_normalize(x, axis=None):
    """Scale ``x`` so that its L2 norm along ``axis`` equals 1."""
    axes = _normalize_axis([axis], x)
    l2_norm = C.sqrt(C.reduce_sum(C.square(x), axis=axes[0]))
    return x / l2_norm
Example #29
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 5 votes vote down vote up
def std(x, axis=None, keepdims=False):
    """Standard deviation of ``x`` along ``axis``: the square root of the variance."""
    variance = var(x, axis=axis, keepdims=keepdims)
    return C.sqrt(variance)
Example #30
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License 5 votes vote down vote up
def sqrt(x):
    """Element-wise square root of ``x``, delegated to CNTK."""
    return C.sqrt(x)