Python caffe.proto.caffe_pb2.NetParameter() Examples

The following are 30 code examples of caffe.proto.caffe_pb2.NetParameter(), collected from open-source projects. The source file, project, and license are noted above each example.
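Before the individual examples, here is a minimal sketch (with placeholder file names) of the two ways a NetParameter is typically populated in the code below: parsing a text prototxt with text_format.Merge, and parsing a binary caffemodel with ParseFromString.

from caffe.proto import caffe_pb2
from google.protobuf import text_format

# Parse a network definition from a text prototxt (placeholder path).
net = caffe_pb2.NetParameter()
with open('deploy.prototxt', 'r') as f:
    text_format.Merge(f.read(), net)

# Parse trained weights from a binary caffemodel (placeholder path).
weights = caffe_pb2.NetParameter()
with open('net.caffemodel', 'rb') as f:
    weights.ParseFromString(f.read())

# Newer prototxts use the repeated `layer` field; legacy ones use `layers`.
for layer in (net.layer if len(net.layer) else net.layers):
    print(layer.name, layer.type)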
Example #1
Source File: compute_bn_statistics.py    From facade-segmentation with MIT License
def make_testable(train_model_path):
    # load the train net prototxt as a protobuf message
    with open(train_model_path) as f:
        train_str = f.read()
    train_net = caffe_pb2.NetParameter()
    text_format.Merge(train_str, train_net)

    # add the mean, var top blobs to all BN layers
    for layer in train_net.layer:
        if layer.type == "BN" and len(layer.top) == 1:
            layer.top.append(layer.top[0] + "-mean")
            layer.top.append(layer.top[0] + "-var")

    # remove the test data layer if present
    if train_net.layer[1].name == "data" and train_net.layer[1].include:
        train_net.layer.remove(train_net.layer[1])
        if train_net.layer[0].include:
            # remove the 'include {phase: TRAIN}' layer param
            train_net.layer[0].include.remove(train_net.layer[0].include[0])
    return train_net 
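A hypothetical call site for make_testable, assuming the returned NetParameter is written back out as a prototxt with text_format.MessageToString (file names are placeholders):

test_net = make_testable('train_val.prototxt')
with open('bn_test.prototxt', 'w') as f:
    f.write(text_format.MessageToString(test_net))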
Example #2
Source File: pickle_caffe_blobs.py    From KL-Loss with Apache License 2.0
def load_and_convert_caffe_model(prototxt_file_name, caffemodel_file_name):
    caffenet = caffe_pb2.NetParameter()
    caffenet_weights = caffe_pb2.NetParameter()
    text_format.Merge(open(prototxt_file_name).read(), caffenet)
    caffenet_weights.ParseFromString(open(caffemodel_file_name).read())
    # C2 conv layers currently require biases, but they are optional in C1
    # Add zero biases if they are missing
    add_missing_biases(caffenet_weights)
    # We only care about getting parameters, so remove layers w/o parameters
    remove_layers_without_parameters(caffenet, caffenet_weights)
    # BatchNorm is not implemented in the translator *and* we need to fold Scale
    # layers into the new C2 SpatialBN op, hence we remove the batch norm layers
    # and apply custom translation code
    bn_weights = remove_spatial_bn_layers(caffenet, caffenet_weights)
    # Set num, channel, height and width for blobs that use shape.dim instead
    normalize_shape(caffenet_weights)
    # Translate the rest of the model
    net, pretrained_weights = caffe_translator.TranslateModel(
        caffenet, caffenet_weights
    )
    pretrained_weights.protos.extend(bn_weights)
    return net, pretrained_weights 
Example #3
Source File: pickle_caffe_blobs.py    From Detectron-DA-Faster-RCNN with Apache License 2.0
def load_and_convert_caffe_model(prototxt_file_name, caffemodel_file_name):
    caffenet = caffe_pb2.NetParameter()
    caffenet_weights = caffe_pb2.NetParameter()
    text_format.Merge(open(prototxt_file_name).read(), caffenet)
    caffenet_weights.ParseFromString(open(caffemodel_file_name).read())
    # C2 conv layers currently require biases, but they are optional in C1
    # Add zero biases if they are missing
    add_missing_biases(caffenet_weights)
    # We only care about getting parameters, so remove layers w/o parameters
    remove_layers_without_parameters(caffenet, caffenet_weights)
    # BatchNorm is not implemented in the translator *and* we need to fold Scale
    # layers into the new C2 SpatialBN op, hence we remove the batch norm layers
    # and apply custom translation code
    bn_weights = remove_spatial_bn_layers(caffenet, caffenet_weights)
    # Set num, channel, height and width for blobs that use shape.dim instead
    normalize_shape(caffenet_weights)
    # Translate the rest of the model
    net, pretrained_weights = caffe_translator.TranslateModel(
        caffenet, caffenet_weights
    )
    pretrained_weights.protos.extend(bn_weights)
    return net, pretrained_weights 
Example #4
Source File: pickle_caffe_blobs.py    From masktextspotter.caffe2 with Apache License 2.0
def load_and_convert_caffe_model(prototxt_file_name, caffemodel_file_name):
    caffenet = caffe_pb2.NetParameter()
    caffenet_weights = caffe_pb2.NetParameter()
    text_format.Merge(open(prototxt_file_name).read(), caffenet)
    caffenet_weights.ParseFromString(open(caffemodel_file_name).read())
    # C2 conv layers currently require biases, but they are optional in C1
    # Add zero biases if they are missing
    add_missing_biases(caffenet_weights)
    # We only care about getting parameters, so remove layers w/o parameters
    remove_layers_without_parameters(caffenet, caffenet_weights)
    # BatchNorm is not implemented in the translator *and* we need to fold Scale
    # layers into the new C2 SpatialBN op, hence we remove the batch norm layers
    # and apply custom translation code
    bn_weights = remove_spatial_bn_layers(caffenet, caffenet_weights)
    # Set num, channel, height and width for blobs that use shape.dim instead
    normalize_shape(caffenet_weights)
    # Translate the rest of the model
    net, pretrained_weights = caffe_translator.TranslateModel(
        caffenet, caffenet_weights
    )
    pretrained_weights.protos.extend(bn_weights)
    return net, pretrained_weights 
Example #5
Source File: pickle_caffe_blobs.py    From Detectron-Cascade-RCNN with Apache License 2.0
def load_and_convert_caffe_model(prototxt_file_name, caffemodel_file_name):
    caffenet = caffe_pb2.NetParameter()
    caffenet_weights = caffe_pb2.NetParameter()
    text_format.Merge(open(prototxt_file_name).read(), caffenet)
    caffenet_weights.ParseFromString(open(caffemodel_file_name).read())
    # C2 conv layers currently require biases, but they are optional in C1
    # Add zero biases if they are missing
    add_missing_biases(caffenet_weights)
    # We only care about getting parameters, so remove layers w/o parameters
    remove_layers_without_parameters(caffenet, caffenet_weights)
    # BatchNorm is not implemented in the translator *and* we need to fold Scale
    # layers into the new C2 SpatialBN op, hence we remove the batch norm layers
    # and apply custom translation code
    bn_weights = remove_spatial_bn_layers(caffenet, caffenet_weights)
    # Set num, channel, height and width for blobs that use shape.dim instead
    normalize_shape(caffenet_weights)
    # Translate the rest of the model
    net, pretrained_weights = caffe_translator.TranslateModel(
        caffenet, caffenet_weights
    )
    pretrained_weights.protos.extend(bn_weights)
    return net, pretrained_weights 
Example #6
Source File: pickle_caffe_blobs.py    From Detectron with Apache License 2.0
def load_and_convert_caffe_model(prototxt_file_name, caffemodel_file_name):
    caffenet = caffe_pb2.NetParameter()
    caffenet_weights = caffe_pb2.NetParameter()
    text_format.Merge(open(prototxt_file_name).read(), caffenet)
    caffenet_weights.ParseFromString(open(caffemodel_file_name).read())
    # C2 conv layers currently require biases, but they are optional in C1
    # Add zero biases if they are missing
    add_missing_biases(caffenet_weights)
    # We only care about getting parameters, so remove layers w/o parameters
    remove_layers_without_parameters(caffenet, caffenet_weights)
    # BatchNorm is not implemented in the translator *and* we need to fold Scale
    # layers into the new C2 SpatialBN op, hence we remove the batch norm layers
    # and apply custom translation code
    bn_weights = remove_spatial_bn_layers(caffenet, caffenet_weights)
    # Set num, channel, height and width for blobs that use shape.dim instead
    normalize_shape(caffenet_weights)
    # Translate the rest of the model
    net, pretrained_weights = caffe_translator.TranslateModel(
        caffenet, caffenet_weights
    )
    pretrained_weights.protos.extend(bn_weights)
    return net, pretrained_weights 
Example #7
Source File: pickle_caffe_blobs.py    From CBNet with Apache License 2.0
def load_and_convert_caffe_model(prototxt_file_name, caffemodel_file_name):
    caffenet = caffe_pb2.NetParameter()
    caffenet_weights = caffe_pb2.NetParameter()
    text_format.Merge(open(prototxt_file_name).read(), caffenet)
    caffenet_weights.ParseFromString(open(caffemodel_file_name).read())
    # C2 conv layers currently require biases, but they are optional in C1
    # Add zero biases if they are missing
    add_missing_biases(caffenet_weights)
    # We only care about getting parameters, so remove layers w/o parameters
    remove_layers_without_parameters(caffenet, caffenet_weights)
    # BatchNorm is not implemented in the translator *and* we need to fold Scale
    # layers into the new C2 SpatialBN op, hence we remove the batch norm layers
    # and apply custom translation code
    bn_weights = remove_spatial_bn_layers(caffenet, caffenet_weights)
    # Set num, channel, height and width for blobs that use shape.dim instead
    normalize_shape(caffenet_weights)
    # Translate the rest of the model
    net, pretrained_weights = caffe_translator.TranslateModel(
        caffenet, caffenet_weights
    )
    pretrained_weights.protos.extend(bn_weights)
    return net, pretrained_weights 
Example #8
Source File: pickle_caffe_blobs.py    From NucleiDetectron with Apache License 2.0
def load_and_convert_caffe_model(prototxt_file_name, caffemodel_file_name):
    caffenet = caffe_pb2.NetParameter()
    caffenet_weights = caffe_pb2.NetParameter()
    text_format.Merge(open(prototxt_file_name).read(), caffenet)
    caffenet_weights.ParseFromString(open(caffemodel_file_name).read())
    # C2 conv layers currently require biases, but they are optional in C1
    # Add zero biases if they are missing
    add_missing_biases(caffenet_weights)
    # We only care about getting parameters, so remove layers w/o parameters
    remove_layers_without_parameters(caffenet, caffenet_weights)
    # BatchNorm is not implemented in the translator *and* we need to fold Scale
    # layers into the new C2 SpatialBN op, hence we remove the batch norm layers
    # and apply custom translation code
    bn_weights = remove_spatial_bn_layers(caffenet, caffenet_weights)
    # Set num, channel, height and width for blobs that use shape.dim instead
    normalize_shape(caffenet_weights)
    # Translate the rest of the model
    net, pretrained_weights = caffe_translator.TranslateModel(
        caffenet, caffenet_weights
    )
    pretrained_weights.protos.extend(bn_weights)
    return net, pretrained_weights 
Example #9
Source File: pickle_caffe_blobs.py    From seg_every_thing with Apache License 2.0
def load_and_convert_caffe_model(prototxt_file_name, caffemodel_file_name):
    caffenet = caffe_pb2.NetParameter()
    caffenet_weights = caffe_pb2.NetParameter()
    text_format.Merge(open(prototxt_file_name).read(), caffenet)
    caffenet_weights.ParseFromString(open(caffemodel_file_name).read())
    # C2 conv layers currently require biases, but they are optional in C1
    # Add zero biases if they are missing
    add_missing_biases(caffenet_weights)
    # We only care about getting parameters, so remove layers w/o parameters
    remove_layers_without_parameters(caffenet, caffenet_weights)
    # BatchNorm is not implemented in the translator *and* we need to fold Scale
    # layers into the new C2 SpatialBN op, hence we remove the batch norm layers
    # and apply custom translation code
    bn_weights = remove_spatial_bn_layers(caffenet, caffenet_weights)
    # Set num, channel, height and width for blobs that use shape.dim instead
    normalize_shape(caffenet_weights)
    # Translate the rest of the model
    net, pretrained_weights = caffe_translator.TranslateModel(
        caffenet, caffenet_weights
    )
    pretrained_weights.protos.extend(bn_weights)
    return net, pretrained_weights 
Example #10
Source File: opt_utils.py    From Caffe-Computation-Graph-Optimization with MIT License
def DrpOut_OPT_Create_Prototxt(original_prototxt_path, original_model_path, optimized_prototxt_path):
    net_param = caffe_pb2.NetParameter()
    new_net_param = caffe_pb2.NetParameter()
    with open(original_prototxt_path, 'rt') as f:
        Parse(f.read(), net_param)
    # Drop every Dropout layer: an in-place Dropout (top == bottom) can simply
    # be skipped; otherwise the previously kept layer's top is renamed to the
    # Dropout's top so downstream layers stay connected.
    for layer_idx in range(0, len(net_param.layer)):
        layer = net_param.layer[layer_idx]
        if layer.type == 'Dropout':
            if layer.top[0] == layer.bottom[0]:
                continue
            else:
                new_net_param.layer[-1].top[0] = layer.top[0]
        else:
            new_net_param.layer.extend([layer])
    new_net_param.name = net_param.name
    with open(optimized_prototxt_path, 'wt') as f:
        f.write(MessageToString(new_net_param))
    print("DROPOUT OPT : Create Optimized Prototxt Done.")
    print(bcolors.OKGREEN + "DROPOUT OPT : Model at " + original_model_path + "." + bcolors.ENDC)
    print(bcolors.OKGREEN + "DROPOUT OPT : Prototxt at " + optimized_prototxt_path + "." + bcolors.ENDC)
Example #11
Source File: pickle_caffe_blobs.py    From Clustered-Object-Detection-in-Aerial-Image with Apache License 2.0
def load_and_convert_caffe_model(prototxt_file_name, caffemodel_file_name):
    caffenet = caffe_pb2.NetParameter()
    caffenet_weights = caffe_pb2.NetParameter()
    text_format.Merge(open(prototxt_file_name).read(), caffenet)
    caffenet_weights.ParseFromString(open(caffemodel_file_name).read())
    # C2 conv layers currently require biases, but they are optional in C1
    # Add zero biases if they are missing
    add_missing_biases(caffenet_weights)
    # We only care about getting parameters, so remove layers w/o parameters
    remove_layers_without_parameters(caffenet, caffenet_weights)
    # BatchNorm is not implemented in the translator *and* we need to fold Scale
    # layers into the new C2 SpatialBN op, hence we remove the batch norm layers
    # and apply custom translation code
    bn_weights = remove_spatial_bn_layers(caffenet, caffenet_weights)
    # Set num, channel, height and width for blobs that use shape.dim instead
    normalize_shape(caffenet_weights)
    # Translate the rest of the model
    net, pretrained_weights = caffe_translator.TranslateModel(
        caffenet, caffenet_weights
    )
    pretrained_weights.protos.extend(bn_weights)
    return net, pretrained_weights 
Example #12
Source File: merge_bn.py    From MobileNet-SSD with MIT License
def pre_process(expected_proto, new_proto):
    net_specs = caffe_pb2.NetParameter()
    net_specs2 = caffe_pb2.NetParameter()
    with open(expected_proto, "r") as fp:
        text_format.Merge(str(fp.read()), net_specs)

    # Copy the net, then delete all layers from the copy; they are re-added
    # below with the BatchNorm/Scale layers stripped out.
    net_specs2.MergeFrom(net_specs)
    layers = net_specs.layer
    num_layers = len(layers)

    for i in range(num_layers - 1, -1, -1):
        del net_specs2.layer[i]

    for idx in range(num_layers):
        l = layers[idx]
        if l.type == "BatchNorm" or l.type == "Scale":
            continue
        elif l.type == "Convolution" or l.type == "Deconvolution":
            top = find_top_after_bn(layers, l.name, l.top[0])
            bn_maps[l.name]["type"] = l.type
            layer = net_specs2.layer.add()
            layer.MergeFrom(l)
            layer.top[0] = top
            layer.convolution_param.bias_term = True
        else:
            layer = net_specs2.layer.add()
            layer.MergeFrom(l)

    with open(new_proto, "w") as fp:
        fp.write("{}".format(net_specs2)) 
Example #13
Source File: draw_net.py    From Deep-Learning-Based-Structural-Damage-Detection with MIT License
def main():
    args = parse_args()
    net = caffe_pb2.NetParameter()
    text_format.Merge(open(args.input_net_proto_file).read(), net)
    print('Drawing net to %s' % args.output_image_file)
    phase = None
    if args.phase == "TRAIN":
        phase = caffe.TRAIN
    elif args.phase == "TEST":
        phase = caffe.TEST
    elif args.phase != "ALL":
        raise ValueError("Unknown phase: " + args.phase)
    caffe.draw.draw_net_to_file(net, args.output_image_file, args.rankdir,
                                phase) 
Example #14
Source File: draw_net.py    From mix-and-match with MIT License
def main():
    args = parse_args()
    net = caffe_pb2.NetParameter()
    text_format.Merge(open(args.input_net_proto_file).read(), net)
    print('Drawing net to %s' % args.output_image_file)
    caffe.draw.draw_net_to_file(net, args.output_image_file, args.rankdir) 
Example #15
Source File: proto_utils.py    From nideep with BSD 2-Clause "Simplified" License
def copy_net_params(src):

    return copy_msg(src, caffe_pb2.NetParameter) 
Example #16
Source File: proto_utils.py    From nideep with BSD 2-Clause "Simplified" License
def from_net_params_file(self, fpath):

        config = caffe_pb2.NetParameter()
        return self.from_file(fpath, config) 
Example #17
Source File: test_proto_utils.py    From nideep with BSD 2-Clause "Simplified" License
def test_copy_msg(self):

        x = NetParameter()
        assert_is_not_none(x)
        y = pu.copy_msg(x, NetParameter)
        assert_is_not(x, y)
        assert_is_not_none(y) 
Example #18
Source File: caffe-int8-convert-tool-dev-weight.py    From caffe-int8-convert-tools with BSD 3-Clause "New" or "Revised" License
def weight_quantize(net, net_file, group_on):
    """
    CaffeModel convolution weight blob Int8 quantize
    Args:
        net: the instance of Caffe inference
        net_file: deploy caffe prototxt
    Returns:    
        none
    """
    print("\nQuantize the kernel weight:")

    # parse the net param from deploy prototxt
    params = caffe_pb2.NetParameter()
    with open(net_file) as f:
        text_format.Merge(f.read(), params)

    for i, layer in enumerate(params.layer):
        # find the convolution layers to get the weight scale from
        if(layer.type == "Convolution" or layer.type == "ConvolutionDepthwise"):
            weight_blob = net.params[layer.name][0].data
            # create the QuantizeLayer instance; enabling group quantize generates
            # an int8 scale per group (layer.convolution_param.group)
            if (group_on == 1):
                quantize_layer = QuantizeLayer(layer.name, layer.bottom[0], layer.convolution_param.num_output)
            else:
                quantize_layer = QuantizeLayer(layer.name, layer.bottom[0], 1)
            # quantize the weight value with 6 bits for conv3x3s1 layers (Winograd F(4,3))
            if(layer.type == "Convolution" and layer.convolution_param.kernel_size[0] == 3 and ((len(layer.convolution_param.stride) == 0) or layer.convolution_param.stride[0] == 1)):
                if(layer.convolution_param.group != layer.convolution_param.num_output):
                    quantize_layer.quantize_weight(weight_blob, True)
                else:
                    quantize_layer.quantize_weight(weight_blob, False)
            # quantize the weight value with 8 bits for all other conv layers
            else:
                quantize_layer.quantize_weight(weight_blob, False)
            # add the quantized layer to the save list
            quantize_layer_lists.append(quantize_layer)

    return None 
Example #19
Source File: test_draw.py    From Deep-Learning-Based-Structural-Damage-Detection with MIT License
def test_draw_net(self):
        for filename in getFilenames():
            net = caffe_pb2.NetParameter()
            with open(filename) as infile:
                text_format.Merge(infile.read(), net)
            caffe.draw.draw_net(net, 'LR') 
Example #20
Source File: caffe_parser.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def get_layers(proto):
    """Returns layers in a caffe_pb2.NetParameter object
    """
    if len(proto.layer):
        return proto.layer
    elif len(proto.layers):
        return proto.layers
    else:
        raise ValueError('Invalid proto file.') 
Example #21
Source File: summarize.py    From Deep-Learning-Based-Structural-Damage-Detection with MIT License
def read_net(filename):
    net = caffe_pb2.NetParameter()
    with open(filename) as f:
        protobuf.text_format.Parse(f.read(), net)
    return net 
Example #22
Source File: complexity.py    From pynetbuilder with BSD 2-Clause "Simplified" License
def get_complexity(netspec=None, prototxt_file=None):
    # At least one of the netspec or prototxt_file params must be provided
    assert (netspec is not None) or (prototxt_file is not None)

    if netspec is not None:
        prototxt_file = _create_file_from_netspec(netspec)
    net = caffe.Net(prototxt_file, caffe.TEST)

    total_params = 0
    total_flops = 0

    net_params = caffe_pb2.NetParameter()
    text_format.Merge(open(prototxt_file).read(), net_params)

    for layer in net_params.layer:
        if layer.name in net.params:

            params = net.params[layer.name][0].data.size
            # For convolution layers, FLOPs scale with the output feature map
            # size, i.e. #params * data_width * data_height
            if layer.type == 'Convolution':  # 'conv' in layer:
                data_width = net.blobs[layer.name].data.shape[2]
                data_height = net.blobs[layer.name].data.shape[3]
                flops = net.params[layer.name][0].data.size * data_width * data_height
                # print >> sys.stderr, layer.name, params, flops
            else:
                flops = net.params[layer.name][0].data.size

            total_params += params
            total_flops += flops

    if netspec is not None:
        os.remove(prototxt_file)

    return total_params, total_flops 
Example #23
Source File: caffe_parser.py    From SNIPER-mxnet with Apache License 2.0
def read_prototxt(fname):
    """Return a caffe_pb2.NetParameter object that defined in a prototxt file
    """
    proto = caffe_pb2.NetParameter()
    with open(fname, 'r') as f:
        text_format.Merge(str(f.read()), proto)
    return proto 
Example #24
Source File: caffe_parser.py    From SNIPER-mxnet with Apache License 2.0
def get_layers(proto):
    """Returns layers in a caffe_pb2.NetParameter object
    """
    if len(proto.layer):
        return proto.layer
    elif len(proto.layers):
        return proto.layers
    else:
        raise ValueError('Invalid proto file.') 
Example #25
Source File: caffe_parser.py    From SNIPER-mxnet with Apache License 2.0
def read_caffemodel(prototxt_fname, caffemodel_fname):
    """Return a caffe_pb2.NetParameter object that defined in a binary
    caffemodel file
    """
    if use_caffe:
        caffe.set_mode_cpu()
        net = caffe.Net(prototxt_fname, caffemodel_fname, caffe.TEST)
        layer_names = net._layer_names
        layers = net.layers
        return (layers, layer_names)
    else:
        proto = caffe_pb2.NetParameter()
        with open(caffemodel_fname, 'rb') as f:
            proto.ParseFromString(f.read())
        return (get_layers(proto), None) 
Example #26
Source File: caffe_parser.py    From SNIPER-mxnet with Apache License 2.0
def read_prototxt(fname):
    """Return a caffe_pb2.NetParameter object that defined in a prototxt file
    """
    proto = caffe_pb2.NetParameter()
    with open(fname, 'r') as f:
        text_format.Merge(str(f.read()), proto)
    return proto 
Example #27
Source File: caffe_parser.py    From SNIPER-mxnet with Apache License 2.0
def read_caffemodel(prototxt_fname, caffemodel_fname):
    """Return a caffe_pb2.NetParameter object that defined in a binary
    caffemodel file
    """
    if use_caffe:
        caffe.set_mode_cpu()
        net = caffe.Net(prototxt_fname, caffemodel_fname, caffe.TEST)
        layer_names = net._layer_names
        layers = net.layers
        return (layers, layer_names)
    else:
        proto = caffe_pb2.NetParameter()
        with open(caffemodel_fname, 'rb') as f:
            proto.ParseFromString(f.read())
        return (get_layers(proto), None) 
Example #28
Source File: builder.py    From channel-pruning with MIT License
def __init__(self, name="network", pt=None):
        self.net = caffe_pb2.NetParameter()
        if pt is None:
            self.net.name = name
        else:
            with open(pt, 'rt') as f:
                pb2.text_format.Merge(f.read(), self.net)
        self.bottom = None
        self.cur = None
        self.this = None

        self._layer = None
        self._bottom = None 
Example #29
Source File: caffe_parser.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def read_prototxt(fname):
    """Return a caffe_pb2.NetParameter object that defined in a prototxt file
    """
    proto = caffe_pb2.NetParameter()
    with open(fname, 'r') as f:
        text_format.Merge(str(f.read()), proto)
    return proto 
Example #30
Source File: caffe_parser.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def read_caffemodel(prototxt_fname, caffemodel_fname):
    """Return a caffe_pb2.NetParameter object that defined in a binary
    caffemodel file
    """
    if use_caffe:
        caffe.set_mode_cpu()
        net = caffe.Net(prototxt_fname, caffemodel_fname, caffe.TEST)
        layer_names = net._layer_names
        layers = net.layers
        return (layers, layer_names)
    else:
        proto = caffe_pb2.NetParameter()
        with open(caffemodel_fname, 'rb') as f:
            proto.ParseFromString(f.read())
        return (get_layers(proto), None)