org.tensorflow.framework.GraphDef Java Examples

The following examples show how to use org.tensorflow.framework.GraphDef. Each example is taken from an open-source project; the source file and license are noted above each snippet.
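All of the snippets below receive an already-parsed GraphDef (together with a NodeDef and its attribute map). If you need a GraphDef to experiment with, it is an ordinary protobuf-generated message, so it can be parsed from a serialized graph file. The following is only a minimal sketch; "frozen_graph.pb" is a placeholder path, not a file from any of the projects above.

import java.io.FileInputStream;
import java.io.InputStream;

import org.tensorflow.framework.GraphDef;
import org.tensorflow.framework.NodeDef;

public class GraphDefInspect {
    public static void main(String[] args) throws Exception {
        // GraphDef is protobuf-generated, so the standard parseFrom methods apply.
        // "frozen_graph.pb" is a placeholder path used for illustration only.
        try (InputStream in = new FileInputStream("frozen_graph.pb")) {
            GraphDef graph = GraphDef.parseFrom(in);
            // Iterate nodes the same way the examples below do.
            for (int i = 0; i < graph.getNodeCount(); i++) {
                NodeDef node = graph.getNode(i);
                System.out.println(node.getName() + " : " + node.getOp());
            }
        }
    }
}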
Example #1
Source File: LocalResponseNormalization.java    From nd4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {

    val aAlpha = nodeDef.getAttrOrThrow("alpha");
    val aBeta = nodeDef.getAttrOrThrow("beta");
    val aBias = nodeDef.getAttrOrThrow("bias");
    val aDepth = nodeDef.getAttrOrThrow("depth_radius");

    val alpha = aAlpha.getF();
    val beta = aBeta.getF();
    val bias = aBias.getF();
    val depth = aDepth.getI();   // depth_radius is an integer attribute in TF (cf. Example #4)

    LocalResponseNormalizationConfig localResponseNormalizationConfig = LocalResponseNormalizationConfig.builder()
            .alpha(alpha)
            .beta(beta)
            .bias(bias)
            .depth((int) depth)
            .build();
    this.config = localResponseNormalizationConfig;
    addArgs();
}
 
Example #2
Source File: Mmul.java    From nd4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    super.initFromTensorFlow(nodeDef, initWith, attributesForNode, graph);
    val isTransposeA = attributesForNode.get("transpose_a").getB();
    val isTransposeB = attributesForNode.get("transpose_b").getB();
    MMulTranspose mMulTranspose = MMulTranspose.builder()
            .transposeA(isTransposeA).transposeB(isTransposeB)
            .build();
    this.mMulTranspose = mMulTranspose;
    val args = args();
    for(val arg : args) {
        if(sameDiff.isPlaceHolder(arg.getVarName()) || arg.getShape() == null) {
            sameDiff.addPropertyToResolve(this,arg.getVarName());
        }
    }
}
 
Example #3
Source File: TensorArray.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    val idd = nodeDef.getInput(nodeDef.getInputCount() - 1);
    NodeDef iddNode = null;
    for(int i = 0; i < graph.getNodeCount(); i++) {
        if(graph.getNode(i).getName().equals(idd)) {
            iddNode = graph.getNode(i);
        }
    }

    val arr = TFGraphMapper.getNDArrayFromTensor(iddNode);

    if (arr != null) {
        int idx = arr.getInt(0);
        addIArgument(idx);
    }

    this.tensorArrayDataType = TFGraphMapper.convertType(attributesForNode.get("dtype").getType());
}
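This example, like several others on this page (TopK, SplitV, Split), locates a NodeDef by scanning the whole graph and comparing names. A small helper along these lines keeps that loop in one place; it is only a sketch built from the GraphDef accessors used above, and the method name is ours.

// Sketch: linear scan for a node by name, using only getNodeCount()/getNode()/getName().
static NodeDef findNodeByName(GraphDef graph, String name) {
    for (int i = 0; i < graph.getNodeCount(); i++) {
        if (graph.getNode(i).getName().equals(name)) {
            return graph.getNode(i);
        }
    }
    return null; // caller decides how to treat a missing node
}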
 
Example #4
Source File: LocalResponseNormalization.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {

    val aAlpha = nodeDef.getAttrOrThrow("alpha");
    val aBeta = nodeDef.getAttrOrThrow("beta");
    val aBias = nodeDef.getAttrOrThrow("bias");
    val aDepth = nodeDef.getAttrOrThrow("depth_radius");

    double alpha = aAlpha.getF();
    double beta = aBeta.getF();
    double bias = aBias.getF();
    int depth = (int)aDepth.getI();

    LocalResponseNormalizationConfig localResponseNormalizationConfig = LocalResponseNormalizationConfig.builder()
            .alpha(alpha)
            .beta(beta)
            .bias(bias)
            .depth((int) depth)
            .build();
    this.config = localResponseNormalizationConfig;
    addArgs();
}
 
Example #5
Source File: SplitV.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    val splitDim = TFGraphMapper.getArrayFrom(TFGraphMapper.getNodeWithNameFromGraph(graph,nodeDef.getInput(0)),graph);
    if(splitDim != null) {
        this.splitDim = splitDim.getInt(0);
        addIArgument(splitDim.getInt(0));
    }

    val numSplits = (int) attributesForNode.get("num_split").getI();
    this.numSplit = numSplits;
    //addIArgument(numSplits);  //the libnd4j op doesn't use/need it for execution
}
 
Example #6
Source File: TopK.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {

    String thisName = nodeDef.getName();

    // FIXME: ????
    String inputName = thisName + "/k";
    NodeDef kNode = null;
    for(int i = 0; i < graph.getNodeCount(); i++) {
        if(graph.getNode(i).getName().equals(inputName)){
            kNode = graph.getNode(i);
            break;
        }
    }

    this.sorted = nodeDef.getAttrOrThrow("sorted").getB();

    if (kNode != null) {
        Preconditions.checkState(kNode != null, "Could not find 'k' parameter node for op: %s", thisName);

        INDArray arr = TFGraphMapper.getNDArrayFromTensor(kNode);
        this.k = arr.getInt(0);

        addIArgument(ArrayUtil.fromBoolean(sorted), k);
    } else
        addIArgument(ArrayUtil.fromBoolean(sorted));
}
 
Example #7
Source File: Mmul.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    super.initFromTensorFlow(nodeDef, initWith, attributesForNode, graph);

    boolean isTransposeA;
    boolean isTransposeB;
    if(nodeDef.getOp().equalsIgnoreCase("MatMul")){
        isTransposeA = attributesForNode.get("transpose_a").getB();
        isTransposeB = attributesForNode.get("transpose_b").getB();

    } else {
        //BatchMatMul, BatchMatMulV2
        //In practice, BatchMatMul seems to use "adj_x" and "adj_y" instead of "transpose_a" and "transpose_b"
        if(attributesForNode.containsKey("transpose_a")){
            isTransposeA = attributesForNode.get("transpose_a").getB();
        } else {
            isTransposeA = attributesForNode.get("adj_x").getB();
        }
        if(attributesForNode.containsKey("transpose_b")){
            isTransposeB = attributesForNode.get("transpose_b").getB();
        } else {
            isTransposeB = attributesForNode.get("adj_y").getB();
        }
    }
    MMulTranspose mMulTranspose = MMulTranspose.builder()
            .transposeA(isTransposeA).transposeB(isTransposeB)
            .build();
    this.mt = mMulTranspose;
    iArguments.clear();
    addIArgument(ArrayUtil.fromBoolean(mt.isTransposeA()), ArrayUtil.fromBoolean(mt.isTransposeB()));
}
 
Example #8
Source File: Range.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    super.initFromTensorFlow(nodeDef, initWith, attributesForNode, graph);
    if(attributesForNode.containsKey("Tidx")){
        dataType = TFGraphMapper.convertType(attributesForNode.get("Tidx").getType());
    }
    addDArgument(dataType);
}
 
Example #9
Source File: BitCast.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph);
    val t = nodeDef.getAttrOrDefault("type", null);
    val type = ArrayOptionsHelper.convertToDataType(t.getType());
    addIArgument(type.toInt());

    dtype = type;
}
 
Example #10
Source File: Split.java    From nd4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    val numSplits = (int) attributesForNode.get("num_split").getI();
    this.numSplit = numSplits;
    addIArgument(numSplits);

    val splitDim = TFGraphMapper.getInstance().getArrayFrom(TFGraphMapper.getInstance().getNodeWithNameFromGraph(graph,nodeDef.getInput(0)),graph);
    if(splitDim != null) {
        this.splitDim = splitDim.getInt(0);
        addIArgument(splitDim.getInt(0));
    }
}
 
Example #11
Source File: GraphImporter.java    From vespa with Apache License 2.0
private static List<IntermediateOperation> importControlInputs(NodeDef node,
                                                               GraphDef tfGraph,
                                                               IntermediateGraph intermediateGraph,
                                                               SavedModelBundle bundle) {
    return node.getInputList().stream()
            .filter(nodeName -> isControlDependency(nodeName))
            .map(nodeName -> importOperation(nodeName, tfGraph, intermediateGraph, bundle))
            .collect(Collectors.toList());
}
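isControlDependency and importOperation are other private helpers in GraphImporter and are not shown on this page. For reference, TensorFlow marks control-dependency inputs in a NodeDef by prefixing the producer's name with a caret (for example "^init_all_tables"), so a minimal predicate could look like the sketch below; this is our own illustration under that assumption, not necessarily Vespa's implementation.

// Sketch only: TensorFlow prefixes control-dependency inputs with "^".
// Vespa's actual helper may differ.
private static boolean isControlDependency(String nodeName) {
    return nodeName.startsWith("^");
}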
 
Example #12
Source File: ScatterMax.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph);

    if (nodeDef.containsAttr("use_locking")) {
        if (nodeDef.getAttrOrThrow("use_locking").getB() == true) {
            bArguments.add(true);
        } else {
            bArguments.add(false);
        }
    } else
        bArguments.add(false);
}
 
Example #13
Source File: ArgAmin.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    if(attributesForNode.containsKey("output_type")) {
        outputType = TFGraphMapper.convertType(attributesForNode.get("output_type").getType());
    } else {
        outputType = DataType.LONG;
    }
}
 
Example #14
Source File: Pad.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    //Based on the TF codebase: gen_array_ops.mirror_pad is used for BOTH REFLECT and SYMMETRIC modes, hence only CONSTANT mode is imported here
    this.mode = Mode.CONSTANT;
    addIArgument(mode.ordinal());
    //Constant value is resolved just before execution
}
 
Example #15
Source File: CheckNumerics.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    String str = attributesForNode.get("message").getS().toStringUtf8();
    //No "string args" support in libnd4j custom ops -> make it a constant instead
    String name = nodeDef.getName();
    SDVariable msg = initWith.constant(name + "/message", Nd4j.scalar(str));
    List<String> newInputs = new ArrayList<>(2);
    newInputs.addAll(initWith.getOps().get(name).getInputsToOp());
    newInputs.add(msg.name());
    initWith.getOps().get(name).setInputsToOp(newInputs);
    initWith.getVariables().get(msg.name()).setInputsForOp(Collections.singletonList(getOwnName()));
}
 
Example #16
Source File: ArgMin.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    if(attributesForNode.containsKey("output_type")) {
        outputType = TFGraphMapper.convertType(attributesForNode.get("output_type").getType());
    } else {
        outputType = DataType.LONG;
    }
}
 
Example #17
Source File: ScatterMul.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph);

    if (nodeDef.containsAttr("use_locking")) {
        if (nodeDef.getAttrOrThrow("use_locking").getB() == true) {
            bArguments.add(true);
        } else {
            bArguments.add(false);
        }
    } else
        bArguments.add(false);
}
 
Example #18
Source File: FakeQuantWithMinMaxVars.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    if(attributesForNode.containsKey("narrow_range")){
        this.narrowRange = attributesForNode.get("narrow_range").getB();
    }
    this.numBits = (int)attributesForNode.get("num_bits").getI();
    addArgs();
}
 
Example #19
Source File: NthElement.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph);

    this.reverse = attributesForNode.get("reverse").getB();
    addArgs();
}
 
Example #20
Source File: ScatterNdAdd.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph);

    if (nodeDef.containsAttr("use_locking")) {
        if (nodeDef.getAttrOrThrow("use_locking").getB() == true) {
            bArguments.add(true);
        } else {
            bArguments.add(false);
        }
    } else
        bArguments.add(false);
}
 
Example #21
Source File: TensorMmul.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    super.initFromTensorFlow(nodeDef, initWith, attributesForNode, graph);
    /**
     * name: "MatMul"
     op: "MatMul"
     input: "input"
     input: "Variable/read"
     attr {
     key: "transpose_b"
     value {
     b: false
     }
     }
     attr {
     key: "transpose_a"
     value {
     b: false
     }
     }
     attr {
     key: "T"
     value {
     type: DT_FLOAT
     }
     }

     */

    val isTransposeA = attributesForNode.get("transpose_a").getB();
    val isTransposeB = attributesForNode.get("transpose_b").getB();
    MMulTranspose mMulTranspose = MMulTranspose.builder()
            .transposeA(isTransposeA).transposeB(isTransposeB)
            .build();
    this.mMulTranspose = mMulTranspose;
    val args = args();
}
 
Example #22
Source File: ResizeBilinear.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph);

    val attrC = attributesForNode.get("align_corners");
    val attrH = attributesForNode.get("half_pixel_centers");

    this.alignCorners = attrC != null ? attrC.getB() : false;
    this.halfPixelCenters = attrH != null ? attrH.getB() : false;

    addArgs();
}
 
Example #23
Source File: Pooling2D.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    val aStrides = nodeDef.getAttrOrThrow("strides");
    val tfStrides = aStrides.getList().getIList();
    val sH = tfStrides.get(1);
    val sW = tfStrides.get(2);

    val aKernels = nodeDef.getAttrOrThrow("ksize");
    val tfKernels = aKernels.getList().getIList();

    val kH = tfKernels.get(1);
    val kW = tfKernels.get(2);

    val aPadding = nodeDef.getAttrOrThrow("padding");
    val padding = aPadding.getList().getIList();

    val paddingMode = aPadding.getS().toStringUtf8().replaceAll("\"","");

    boolean isSameMode = paddingMode.equalsIgnoreCase("SAME");

    if (!isSameMode)
        log.debug("Mode: {}", paddingMode);

    Pooling2DConfig pooling2DConfig = Pooling2DConfig.builder()
            .sH(sH.intValue())
            .sW(sW.intValue())
            .type(null)
            .isSameMode(isSameMode)
            .kH(kH.intValue())
            .kW(kW.intValue())
            .pH(padding.get(0).intValue())
            .pW(padding.get(1).intValue())
            .build();
    this.config = pooling2DConfig;
    addArgs();
    log.debug("Pooling: k: [{},{}]; s: [{}, {}], padding: {}", kH, kW, sH, sW, aPadding);


}
 
Example #24
Source File: ExtractImagePatches.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    //TF includes redundant leading and trailing 1s for kSizes, strides, rates (positions 0/3)
    kSizes = parseIntList(attributesForNode.get("ksizes").getList());
    strides = parseIntList(attributesForNode.get("strides").getList());
    rates = parseIntList(attributesForNode.get("rates").getList());
    String s = attributesForNode.get("padding").getS().toStringUtf8();
    isSameMode = s.equalsIgnoreCase("SAME");
    addArgs();
}
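parseIntList is a private helper of ExtractImagePatches and is not reproduced on this page; judging from how it is called, it copies the repeated int64 attribute list into an int[]. The following is a rough sketch under that assumption, not the actual deeplearning4j code.

// Sketch only: copy a repeated-int64 attribute list into an int[].
// The redundant 1s at positions 0 and 3 mentioned above are kept here
// and assumed to be handled later (e.g. in addArgs()).
private static int[] parseIntList(org.tensorflow.framework.AttrValue.ListValue list) {
    int[] out = new int[list.getICount()];
    for (int i = 0; i < out.length; i++) {
        out[i] = (int) list.getI(i);
    }
    return out;
}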
 
Example #25
Source File: MirrorPad.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph);
    iArguments.add(isSymmetric ? 1L : 0L);
}
 
Example #26
Source File: ResizeNearestNeighbor.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph);
}
 
Example #27
Source File: RandomGamma.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
        outputDataType = DataTypeAdapter.dtypeConv(attributesForNode.get("T").getType());
}
 
Example #28
Source File: Pooling3D.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    val aStrides = nodeDef.getAttrOrThrow("strides");
    List<Long> tfStrides = aStrides.getList().getIList();
    val aKernels = nodeDef.getAttrOrThrow("ksize");
    List<Long> tfKernels = aKernels.getList().getIList();
    val aPadding = nodeDef.getAttrOrThrow("padding");
    List<Long> tfPadding = aPadding.getList().getIList();

    String paddingMode = aPadding.getS().toStringUtf8().replaceAll("\"", "");

    boolean isSameMode = paddingMode.equalsIgnoreCase("SAME");

    String data_format = "ndhwc";
    if (nodeDef.containsAttr("data_format")) {
        val attr = nodeDef.getAttrOrThrow("data_format");

        data_format = attr.getS().toStringUtf8().toLowerCase();
    }

    //Order: depth, height, width
    //TF doesn't have dilation, it seems?
    int[] strides = new int[3];
    int[] padding = new int[3];
    int[] kernel = new int[3];
    for( int i=0; i<3; i++ ) {
        //TF values here have 5 values: minibatch and Channels at positions 0 and 4, which are almost always 1
        strides[i] = tfStrides.get(i+1).intValue();
        if(tfPadding != null && tfPadding.size() > 0) {
            //Empty for SAME mode
            padding[i] = tfPadding.get(i + 1).intValue();
        }
        kernel[i] = tfKernels.get(i+1).intValue();
    }

    Pooling3DType type;
    String name = nodeDef.getOp().toLowerCase();
    if(name.startsWith("max")){
        type = Pooling3DType.MAX;
    } else if(name.startsWith("av")){
        type = Pooling3DType.AVG;
    } else {
        throw new IllegalStateException("Unknown or not supported pooling type: " + name);
    }

    Pooling3DConfig conf = Pooling3DConfig.builder()
            .sD(strides[0]).sH(strides[1]).sW(strides[2])
            .pD(padding[0]).pH(padding[1]).pW(padding[2])
            .kD(kernel[0]).kH(kernel[1]).kW(kernel[2])
            .type(type)
            .isSameMode(isSameMode)
            .isNCDHW(data_format.equalsIgnoreCase("ncdhw"))
            .build();
    this.config = conf;
    addArgs();
}
 
Example #29
Source File: SRUCell.java    From deeplearning4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    super.initFromTensorFlow(nodeDef, initWith, attributesForNode, graph);
}
 
Example #30
Source File: Enter.java    From nd4j with Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
    super.initFromTensorFlow(nodeDef, initWith, attributesForNode, graph);
}