Python cntk.log() Examples

The following are 9 code examples of cntk.log(), drawn from open-source projects. You can go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the module cntk, or try the search function.
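Before the project examples, here is a minimal sketch of calling cntk.log() directly. It assumes CNTK is installed and importable; cntk.log() computes the element-wise natural logarithm and returns a CNTK Function that can be evaluated with .eval().

import numpy as np
import cntk as C

# Element-wise natural logarithm; .eval() evaluates the resulting Function.
x = np.array([1.0, 2.0, 4.0], dtype=np.float32)
print(C.log(x).eval())  # approximately [0.  0.6931  1.3863]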
Example #1
Source Project: ngraph-python   Author: NervanaSystems   File: test_ops_unary.py    License: Apache License 2.0
def test_log():
    # Each call builds a CNTK log() graph; the helper evaluates it with both
    # CNTK and the ngraph importer and checks that the results match.
    assert_cntk_ngraph_array_equal(C.log([1., 2.]))
    assert_cntk_ngraph_array_equal(C.log([[1, 2], [3, 4]]))
    assert_cntk_ngraph_array_equal(C.log([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]))
Example #2
Source Project: GraphicDesignPatternByPython   Author: Relph1119   File: cntk_backend.py    License: MIT License
def logsumexp(x, axis=None, keepdims=False):
    # log, sum and exp here are the backend's own wrappers around the
    # corresponding cntk ops, not the cntk functions directly.
    return log(sum(exp(x), axis=axis, keepdims=keepdims))
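As written, logsumexp can overflow in exp() for large inputs. A common remedy, sketched here in plain NumPy rather than as backend code, is the max-shift trick: subtract the maximum before exponentiating and add it back afterwards.

import numpy as np

def logsumexp_stable(x, axis=None, keepdims=False):
    # Shifting by the max keeps exp() in a safe range; the shift is added back.
    m = np.max(x, axis=axis, keepdims=True)
    out = np.log(np.sum(np.exp(x - m), axis=axis, keepdims=keepdims))
    return out + np.max(x, axis=axis, keepdims=keepdims)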
Example #3
Source Project: GraphicDesignPatternByPython   Author: Relph1119   File: cntk_backend.py    License: MIT License
def log(x):
    return C.log(x) 
Example #4
Source Project: GraphicDesignPatternByPython   Author: Relph1119   File: cntk_backend.py    License: MIT License
def binary_crossentropy(target, output, from_logits=False):
    if from_logits:
        output = C.sigmoid(output)
    # Clip to [eps, 1 - eps] so that log() never sees exactly 0 or 1.
    output = C.clip(output, epsilon(), 1.0 - epsilon())
    output = -target * C.log(output) - (1.0 - target) * C.log(1.0 - output)
    return output
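The last line is the element-wise binary cross-entropy -t*log(p) - (1-t)*log(1-p). A quick NumPy check of the same formula (the epsilon value here is an assumption, not the backend's):

import numpy as np

eps = 1e-7  # assumed; the backend gets this via epsilon()
t = np.array([1.0, 0.0, 1.0])
p = np.clip(np.array([0.9, 0.2, 0.6]), eps, 1.0 - eps)
print(-t * np.log(p) - (1.0 - t) * np.log(1.0 - p))
# approximately [0.1054  0.2231  0.5108]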
Example #5
Source Project: GraphicDesignPatternByPython   Author: Relph1119   File: cntk_backend.py    License: MIT License
def categorical_crossentropy(target, output, from_logits=False, axis=-1):
    # Here, unlike other backends, the tensors lack a batch dimension:
    axis_without_batch = -1 if axis == -1 else axis - 1
    output_dimensions = list(range(len(output.shape)))
    if axis_without_batch != -1 and axis_without_batch not in output_dimensions:
        raise ValueError(
            '{}{}{}'.format(
                'Unexpected channels axis {}. '.format(axis_without_batch),
                'Expected to be -1 or one of the axes of `output`, ',
                'which has {} dimensions.'.format(len(output.shape))))
    # If the channels are not in the last axis, move them to be there:
    if axis_without_batch != -1 and axis_without_batch != output_dimensions[-1]:
        permutation = output_dimensions[:axis_without_batch]
        permutation += output_dimensions[axis_without_batch + 1:]
        permutation += [axis_without_batch]
        output = C.transpose(output, permutation)
        target = C.transpose(target, permutation)
    if from_logits:
        result = C.cross_entropy_with_softmax(output, target)
        # cntk's result shape is (batch, 1), while keras expects (batch,)
        return C.reshape(result, ())
    else:
        # scale preds so that the class probas of each sample sum to 1
        output /= C.reduce_sum(output, axis=-1)
        # avoid numerical instability with epsilon clipping
        output = C.clip(output, epsilon(), 1.0 - epsilon())
        return -sum(target * C.log(output), axis=-1) 
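The else branch normalizes the predictions, clips them, and computes -sum(t * log(p)) along the class axis. A NumPy sketch of that branch, with an assumed epsilon:

import numpy as np

eps = 1e-7  # assumed value
target = np.array([0.0, 1.0, 0.0])
output = np.array([0.2, 0.5, 0.3])
output = output / output.sum(axis=-1, keepdims=True)  # class probas sum to 1
output = np.clip(output, eps, 1.0 - eps)
print(-np.sum(target * np.log(output), axis=-1))  # approximately 0.6931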
Example #6
Source Project: DeepLearning_Wavelet-LSTM   Author: hello-sea   File: cntk_backend.py    License: MIT License
def logsumexp(x, axis=None, keepdims=False):
    return log(sum(exp(x), axis=axis, keepdims=keepdims)) 
Example #7
Source Project: DeepLearning_Wavelet-LSTM   Author: hello-sea   File: cntk_backend.py    License: MIT License
def log(x):
    return C.log(x) 
Example #8
Source Project: DeepLearning_Wavelet-LSTM   Author: hello-sea   File: cntk_backend.py    License: MIT License
def binary_crossentropy(target, output, from_logits=False):
    if from_logits:
        output = C.sigmoid(output)
    output = C.clip(output, epsilon(), 1.0 - epsilon())
    output = -target * C.log(output) - (1.0 - target) * C.log(1.0 - output)
    return output 
Example #9
Source Project: DeepLearning_Wavelet-LSTM   Author: hello-sea   File: cntk_backend.py    License: MIT License
def categorical_crossentropy(target, output, from_logits=False):
    if from_logits:
        result = C.cross_entropy_with_softmax(output, target)
        # cntk's result shape is (batch, 1), while keras expects (batch,)
        return C.reshape(result, ())
    else:
        # scale preds so that the class probas of each sample sum to 1
        output /= C.reduce_sum(output, axis=-1)
        # avoid numerical instability with epsilon clipping
        output = C.clip(output, epsilon(), 1.0 - epsilon())
        return -sum(target * C.log(output), axis=-1) 
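Note that this variant, apparently from an older copy of the Keras CNTK backend, omits the axis handling of Example #5 and assumes the class axis is already last.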