Java Code Examples for org.nd4j.autodiff.samediff.SameDiff#placeHolder()

The following examples show how to use org.nd4j.autodiff.samediff.SameDiff#placeHolder(). They are drawn from open-source projects; the source file, project, and license for each example are noted above it.
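Before the examples, a quick orientation: placeHolder(name, dataType, shape...) declares a graph input whose array is supplied only at execution time, with -1 marking a dimension (typically the minibatch size) that stays unknown until then. The following minimal sketch shows the basic pattern the examples below share; the class, variable, and placeholder names here are illustrative only, not taken from any of the projects.

import java.util.Collections;
import java.util.Map;
import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class PlaceHolderBasics {
    public static void main(String[] args) {
        SameDiff sd = SameDiff.create();

        //Declare a placeholder: name, data type, shape (-1 = dynamic minibatch dimension)
        SDVariable in = sd.placeHolder("in", DataType.FLOAT, -1, 4);
        SDVariable w = sd.var("w", Nd4j.rand(DataType.FLOAT, 4, 3));
        SDVariable out = sd.nn().softmax("out", in.mmul(w));

        //Placeholders are filled at execution time via a name -> array map
        Map<String, INDArray> placeholders = Collections.singletonMap("in", Nd4j.rand(DataType.FLOAT, 5, 4));
        INDArray result = sd.output(placeholders, "out").get("out");
        System.out.println(java.util.Arrays.toString(result.shape()));   //[5, 3]
    }
}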
Example 1
Source File: ActivationGradChecks.java    From deeplearning4j with Apache License 2.0
@Test
public void testActivationGradientCheck2(){
    Nd4j.getRandom().setSeed(12345);
    SameDiff sd = SameDiff.create();
    SDVariable x = sd.placeHolder("x", DataType.DOUBLE, 3, 4);
    SDVariable y = sd.var("y", Nd4j.rand(DataType.DOUBLE, 4, 5));
    SDVariable mmul = x.mmul("mmul", y);
    SDVariable sigmoid = sd.math().tanh("sigmoid", mmul);    //note: applies tanh, despite the "sigmoid" variable/op name
    SDVariable loss = sigmoid.std(true);

    Map<String, INDArray> m = new HashMap<>();
    m.put("x", Nd4j.rand(DataType.DOUBLE, 3, 4));

    GradCheckUtil.ActGradConfig c = GradCheckUtil.ActGradConfig.builder()
            .sd(sd)
            .placeholderValues(m)
            .activationGradsToCheck(Arrays.asList("sigmoid", "mmul"))
            .subset(GradCheckUtil.Subset.RANDOM)
            .maxPerParam(10)
            .build();

    boolean ok = GradCheckUtil.checkActivationGradients(c);

    assertTrue(ok);
}
 
Example 2
Source File: UIListenerTest.java    From deeplearning4j with Apache License 2.0
private static SameDiff getSimpleNet(){
    Nd4j.getRandom().setSeed(12345);
    SameDiff sd = SameDiff.create();
    SDVariable in = sd.placeHolder("in", DataType.FLOAT, -1, 4);
    SDVariable label = sd.placeHolder("label", DataType.FLOAT, -1, 3);
    SDVariable w = sd.var("W", Nd4j.rand(DataType.FLOAT, 4, 3));
    SDVariable b = sd.var("b", DataType.FLOAT, 1, 3);
    SDVariable mmul = in.mmul(w).add(b);
    SDVariable softmax = sd.nn.softmax("softmax", mmul);
    SDVariable loss = sd.loss().logLoss("loss", label, softmax);

    sd.setTrainingConfig(TrainingConfig.builder()
            .dataSetFeatureMapping("in")
            .dataSetLabelMapping("label")
            .updater(new Adam(1e-1))
            .weightDecay(1e-3, true)
            .build());
    return sd;
}
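
getSimpleNet() only builds the graph; a sketch of how it could be trained and queried follows (added here for illustration, not part of the original test; IrisDataSetIterator matches the 4-feature/3-class placeholders and appears the same way in Example 7):

SameDiff sd = getSimpleNet();
DataSetIterator iter = new IrisDataSetIterator(50, 150);
while (iter.hasNext()) {
    sd.fit(iter.next());    //one minibatch step; feature/label mapping comes from the TrainingConfig
}

//At inference time only the "in" placeholder needs a value; "label" is used for training only
INDArray features = Nd4j.rand(DataType.FLOAT, 1, 4);
INDArray probs = sd.output(Collections.singletonMap("in", features), "softmax").get("softmax");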
 
Example 3
Source File: CheckpointListenerTest.java    From deeplearning4j with Apache License 2.0
public static SameDiff getModel(){
    Nd4j.getRandom().setSeed(12345);
    SameDiff sd = SameDiff.create();
    SDVariable in = sd.placeHolder("in", DataType.FLOAT, -1, 4);
    SDVariable label = sd.placeHolder("label", DataType.FLOAT, -1, 3);
    SDVariable w = sd.var("W", Nd4j.rand(DataType.FLOAT, 4, 3));
    SDVariable b = sd.var("b", DataType.FLOAT, 3);

    SDVariable mmul = in.mmul(w).add(b);
    SDVariable softmax = sd.nn().softmax(mmul);
    SDVariable loss = sd.loss().logLoss("loss", label, softmax);

    sd.setTrainingConfig(TrainingConfig.builder()
            .dataSetFeatureMapping("in")
            .dataSetLabelMapping("label")
            .updater(new Adam(1e-2))
            .weightDecay(1e-2, true)
            .build());

    return sd;
}
 
Example 4
Source File: SameDiffVerticleClassificationMetricsTest.java    From konduit-serving with Apache License 2.0
@Override
public JsonObject getConfigObject() throws Exception {
    SameDiff sameDiff = SameDiff.create();
    SDVariable x = sameDiff.placeHolder("x", DataType.FLOAT, 2);
    SDVariable y = sameDiff.placeHolder("y", DataType.FLOAT, 2);
    SDVariable add = x.add("output", y);
    File tmpSameDiffFile = temporary.newFile();
    sameDiff.asFlatFile(tmpSameDiffFile);
    SameDiff values = SameDiff.fromFlatFile(tmpSameDiffFile);    //round-trip check: confirm the saved file loads back

    ServingConfig servingConfig = ServingConfig.builder()
            .outputDataFormat(Output.DataFormat.ND4J)
            .metricsConfigurations(Collections.singletonList(ClassificationMetricsConfig.builder()
                    .classificationLabels(Arrays.asList("0", "1")).build()))
            .metricTypes(Collections.singletonList(MetricType.CLASSIFICATION))
            .httpPort(port)
            .build();

    SameDiffStep modelPipelineConfig = SameDiffStep.builder()
            .path(tmpSameDiffFile.getAbsolutePath())
            .inputNames(Arrays.asList("x", "y"))
            .outputNames(Collections.singletonList("output"))
            .build();

    InferenceConfiguration inferenceConfiguration = InferenceConfiguration.builder()
            .servingConfig(servingConfig)
            .step(modelPipelineConfig)
            .build();

    return new JsonObject(inferenceConfiguration.toJson());
}
 
Example 5
Source File: LossOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testStdLossMixedDataType(){
    //The default data type in this test suite is double.
    //This test used to throw an exception complaining about mixed data types.

    SameDiff sd = SameDiff.create();
    SDVariable v = sd.placeHolder("x", DataType.FLOAT, 3,4);
    SDVariable loss = v.std(true);
}
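
The test stops at graph construction. To actually evaluate the loss, the placeholder would need a value at execution time; a sketch of the extra step (not part of the original test):

Map<String, INDArray> ph = Collections.singletonMap("x", Nd4j.rand(DataType.FLOAT, 3, 4));
INDArray stdValue = sd.output(ph, loss.name()).get(loss.name());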
 
Example 6
Source File: ExecDebuggingListenerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testExecDebugListener(){
    SameDiff sd = SameDiff.create();
    SDVariable in = sd.placeHolder("in", DataType.FLOAT, -1, 3);
    SDVariable label = sd.placeHolder("label", DataType.FLOAT, 1, 2);
    SDVariable w = sd.var("w", Nd4j.rand(DataType.FLOAT, 3, 2));
    SDVariable b = sd.var("b", Nd4j.rand(DataType.FLOAT, 1, 2));
    SDVariable sm = sd.nn.softmax("softmax", in.mmul(w).add(b));
    SDVariable loss = sd.loss.logLoss("loss", label, sm);

    INDArray i = Nd4j.rand(DataType.FLOAT, 1, 3);
    INDArray l = Nd4j.rand(DataType.FLOAT, 1, 2);

    sd.setTrainingConfig(TrainingConfig.builder()
            .dataSetFeatureMapping("in")
            .dataSetLabelMapping("label")
            .updater(new Adam(0.001))
            .build());

    for(ExecDebuggingListener.PrintMode pm : ExecDebuggingListener.PrintMode.values()){
        sd.setListeners(new ExecDebuggingListener(pm, -1, true));
        sd.fit(new DataSet(i, l));
        System.out.println("\n\n\n");
    }
}
 
Example 7
Source File: ListenerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testCustomListener() {
    SameDiff sd = SameDiff.create();
    SDVariable in = sd.placeHolder("input", DataType.FLOAT, -1, 4);
    SDVariable label = sd.placeHolder("label", DataType.FLOAT, -1, 3);
    SDVariable w = sd.var("w", Nd4j.rand(DataType.FLOAT, 4, 3));
    SDVariable b = sd.var("b", Nd4j.rand(DataType.FLOAT, 3));
    SDVariable z = sd.nn().linear("z", in, w, b);
    SDVariable out = sd.nn().softmax("out", z, 1);
    SDVariable loss = sd.loss().softmaxCrossEntropy("loss", label, out, null);

    //Create and set the training configuration
    double learningRate = 1e-3;
    TrainingConfig config = new TrainingConfig.Builder()
            .l2(1e-4)                               //L2 regularization
            .updater(new Adam(learningRate))        //Adam optimizer with specified learning rate
            .dataSetFeatureMapping("input")         //DataSet features array should be associated with variable "input"
            .dataSetLabelMapping("label")           //DataSet label array should be associated with variable "label
            .addEvaluations(false,"out",0,new Evaluation())
            .build();
    sd.setTrainingConfig(config);

    CustomListener listener = new CustomListener();
    Map<String,INDArray> m = sd.output()
            .data(new IrisDataSetIterator(150, 150))
            .output("out")
            .listeners(listener)
            .exec();

    assertEquals(1, m.size());
    assertTrue(m.containsKey("out"));
    assertNotNull(listener.z);
    assertNotNull(listener.out);

}
 
Example 8
Source File: TestSessions.java    From deeplearning4j with Apache License 2.0
@Test
public void testMergeSimple(){
    //This isn't really a sensible graph, as merge op behaviour is undefined when multiple inputs are available...

    SameDiff sd = SameDiff.create();
    SDVariable ph1 = sd.placeHolder("x", DataType.FLOAT, 3, 3);
    SDVariable ph2 = sd.placeHolder("y", DataType.FLOAT, 3, 3);

    SDVariable merge = sd.merge(ph1, ph2);
    SDVariable outVar = sd.identity(merge);

    INDArray x = Nd4j.linspace(1, 9, 9).castTo(DataType.FLOAT).reshape(3, 3);
    INDArray y = Nd4j.linspace(0.0, 0.9, 9, DataType.DOUBLE).castTo(DataType.FLOAT).reshape(3, 3);

    Map<String,INDArray> m = new HashMap<>();
    m.put("x", x);
    m.put("y", y);

    InferenceSession is = new InferenceSession(sd);
    String outName = outVar.name();
    Map<String,INDArray> outMap = is.output(Collections.singletonList(outName), m, null,
            Collections.<String>emptyList(), null, At.defaultAt(Operation.TRAINING));

    assertEquals(1, outMap.size());
    INDArray out = outMap.get(outName);
    assertTrue(x.equals(out) || y.equals(out));
}
 
Example 9
Source File: TestSessions.java    From deeplearning4j with Apache License 2.0
@Test
public void testInferenceSessionBasic2(){
    //So far: trivial test to check execution order

    SameDiff sd = SameDiff.create();
    SDVariable ph1 = sd.placeHolder("x", DataType.FLOAT, 3,3);
    SDVariable ph2 = sd.placeHolder("y", DataType.FLOAT, 3,3);

    SDVariable a = ph1.add("a", ph2);
    SDVariable b = ph1.mmul("b", ph2);
    SDVariable c = ph1.sub("c", ph2);
    SDVariable d = a.add("d", b);

    //To get array d - need to execute: a, b, d - NOT the sub op (c)

    //NOTE: normally sessions are internal and completely hidden from users

    InferenceSession is = new InferenceSession(sd);
    INDArray x = Nd4j.linspace(1, 9, 9).castTo(DataType.FLOAT).reshape(3,3);
    INDArray y = Nd4j.linspace(0.0, 0.9, 9, DataType.DOUBLE).castTo(DataType.FLOAT).reshape(3,3);

    INDArray aExp = x.add(y);
    INDArray bExp = x.mmul(y);
    INDArray dExp = aExp.add(bExp);

    Map<String,INDArray> m = new HashMap<>();
    m.put("x", x);
    m.put("y", y);

    Map<String,INDArray> outMap = is.output(Collections.singletonList("d"), m, null,
            Collections.<String>emptyList(), null, At.defaultAt(Operation.TRAINING));

    assertEquals(1, outMap.size());
    assertEquals(dExp, outMap.get("d"));
}
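
As the comment in the test notes, InferenceSession is internal. The user-facing equivalent of the call above is sd.output(...), which resolves the same minimal execution order; a sketch:

Map<String,INDArray> placeholders = new HashMap<>();
placeholders.put("x", x);
placeholders.put("y", y);
INDArray d = sd.output(placeholders, "d").get("d");   //executes a, b, d; skips the sub op (c)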
 
Example 10
Source File: TestSameDiffServing.java    From konduit-serving with Apache License 2.0
public static SameDiff getModel(){
    Nd4j.getRandom().setSeed(12345);
    SameDiff sd = SameDiff.create();
    SDVariable in = sd.placeHolder("in", DataType.FLOAT, -1, 784);

    SDVariable w1 = sd.var("w1", Nd4j.rand(DataType.FLOAT, 784, 100));
    SDVariable b1 = sd.var("b1", Nd4j.rand(DataType.FLOAT, 100));
    SDVariable a1 = sd.nn.tanh(in.mmul(w1).add(b1));

    SDVariable w2 = sd.var("w2", Nd4j.rand(DataType.FLOAT, 100, 10));
    SDVariable b2 = sd.var("b2", Nd4j.rand(DataType.FLOAT, 10));
    SDVariable out = sd.nn.softmax("out", a1.mmul(w2).add(b2));
    return sd;
}
 
Example 11
Source File: SameDiffVerticleNd4jTest.java    From konduit-serving with Apache License 2.0
@Override
public JsonObject getConfigObject() throws Exception {
    SameDiff sameDiff = SameDiff.create();
    SDVariable x = sameDiff.placeHolder("x", DataType.FLOAT, 2);
    SDVariable y = sameDiff.placeHolder("y", DataType.FLOAT, 2);
    SDVariable add = x.add("output", y);
    File tmpSameDiffFile = temporary.newFile();
    sameDiff.asFlatFile(tmpSameDiffFile);
    SameDiff values = SameDiff.fromFlatFile(tmpSameDiffFile);    //round-trip check: confirm the saved file loads back

    ServingConfig servingConfig = ServingConfig.builder()
            .outputDataFormat(Output.DataFormat.ND4J)
            .httpPort(port)
            .build();

    SameDiffStep modelPipelineConfig = SameDiffStep.builder()
            .path(tmpSameDiffFile.getAbsolutePath())
            .inputNames(Arrays.asList("x", "y"))
            .outputNames(Collections.singletonList("output"))
            .build();

    InferenceConfiguration inferenceConfiguration = InferenceConfiguration.builder()
            .servingConfig(servingConfig)
            .step(modelPipelineConfig)
            .build();

    return new JsonObject(inferenceConfiguration.toJson());
}
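
The asFlatFile/fromFlatFile round trip in these serving tests can also be exercised on its own, without the Konduit server; a minimal sketch (the file path is illustrative):

File f = new File("model.fb");                     //any writable path
sameDiff.asFlatFile(f);                            //serialize graph definition + variable values
SameDiff restored = SameDiff.fromFlatFile(f);

Map<String,INDArray> inputs = new HashMap<>();
inputs.put("x", Nd4j.createFromArray(1.0f, 2.0f));
inputs.put("y", Nd4j.createFromArray(3.0f, 4.0f));
INDArray sum = restored.output(inputs, "output").get("output");   //[4.0, 6.0]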
 
Example 12
Source File: SameDiffVerticleNumpyTest.java    From konduit-serving with Apache License 2.0
@Override
public JsonObject getConfigObject() throws Exception {
    SameDiff sameDiff = SameDiff.create();
    SDVariable x = sameDiff.placeHolder("x", DataType.FLOAT, 2);
    SDVariable y = sameDiff.placeHolder("y", DataType.FLOAT, 2);
    SDVariable add = x.add("output", y);
    File tmpSameDiffFile = temporary.newFile();
    sameDiff.asFlatFile(tmpSameDiffFile);

    ServingConfig servingConfig = ServingConfig.builder()
            .outputDataFormat(Output.DataFormat.NUMPY)
            .httpPort(port)
            .build();

    SameDiffStep config = SameDiffStep.builder()
            .path(tmpSameDiffFile.getAbsolutePath())
            .inputNames(Arrays.asList("x", "y"))
            .outputNames(Collections.singletonList("output"))
            .build();

    InferenceConfiguration inferenceConfiguration = InferenceConfiguration.builder()
            .servingConfig(servingConfig)
            .step(config)
            .build();

    return new JsonObject(inferenceConfiguration.toJson());
}
 
Example 13
Source File: SameDiffInferenceExecutionerTests.java    From konduit-serving with Apache License 2.0
@Test(timeout = 60000)
public void testSameDiff() throws Exception {
    SameDiffInferenceExecutioner sameDiffInferenceExecutioner = new SameDiffInferenceExecutioner();
    SameDiff sameDiff = SameDiff.create();
    SDVariable input1 = sameDiff.placeHolder("input1", DataType.FLOAT, 2, 2);
    SDVariable input2 = sameDiff.placeHolder("input2", DataType.FLOAT, 2, 2);
    SDVariable result = input1.add("output", input2);
    INDArray input1Arr = Nd4j.linspace(1, 4, 4).reshape(2, 2);
    INDArray input2Arr = Nd4j.linspace(1, 4, 4).reshape(2, 2);
    sameDiff.associateArrayWithVariable(input1Arr, input1.name());
    sameDiff.associateArrayWithVariable(input2Arr, input2.name());
    Map<String, INDArray> indArrays = new LinkedHashMap<>();
    indArrays.put(input1.name(), input1Arr);
    indArrays.put(input2.name(), input2Arr);
    Map<String, INDArray> outputs = sameDiff.outputAll(indArrays);
    assertEquals(3, outputs.size());

    ParallelInferenceConfig parallelInferenceConfig = ParallelInferenceConfig.defaultConfig();
    File newFile = temporary.newFile();
    sameDiff.asFlatFile(newFile);
    SameDiffModelLoader sameDiffModelLoader = new SameDiffModelLoader(newFile, Arrays.asList("input1", "input2"), Arrays.asList("output"));

    sameDiffInferenceExecutioner.initialize(sameDiffModelLoader, parallelInferenceConfig);

    INDArray[] execute = sameDiffInferenceExecutioner.execute(new INDArray[]{input1Arr, input2Arr});
    assertEquals(outputs.values().iterator().next(), execute[0]);
}
 
Example 14
Source File: TestSessions.java    From deeplearning4j with Apache License 2.0
@Test
public void testSwitchSimple(){
    SameDiff sd = SameDiff.create();
    SDVariable x = sd.placeHolder("x", DataType.FLOAT, 3, 3);
    SDVariable b = sd.placeHolder("b", DataType.BOOL);

    SDVariable[] switchOut = sd.switchOp(x, b); //Order: false then true
    SDVariable falsePlusOne = switchOut[0].add("addFalseBranch", 1);
    SDVariable truePlusTen = switchOut[1].add("addTrueBranch", 10.0);

    SDVariable merge = sd.merge(falsePlusOne, truePlusTen);

    INDArray xArr = Nd4j.create(DataType.FLOAT, 3, 3);
    INDArray bArr = Nd4j.scalar(true);

    INDArray expTrue = xArr.add(10.0);
    INDArray expFalse = xArr.add(1.0);

    Map<String,INDArray> m = new HashMap<>();
    m.put("x", xArr);
    m.put("b", bArr);

    InferenceSession is = new InferenceSession(sd);
    String n = merge.name();

    Map<String,INDArray> outMap = is.output(Collections.singletonList(n), m, null, Collections.<String>emptyList(),
            null, At.defaultAt(Operation.TRAINING));
    assertEquals(1, outMap.size());
    assertEquals(expTrue, outMap.get(n));

    //Check false case:
    bArr.assign(0);
    is = new InferenceSession(sd);
    outMap = is.output(Collections.singletonList(n), m, null, Collections.<String>emptyList(), null, At.defaultAt(Operation.TRAINING));
    assertEquals(1, outMap.size());
    assertEquals(expFalse, outMap.get(n));
}
 
Example 15
Source File: ProfilingListenerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testProfilingListenerSimple() throws Exception {
    SameDiff sd = SameDiff.create();
    SDVariable in = sd.placeHolder("in", DataType.FLOAT, -1, 3);
    SDVariable label = sd.placeHolder("label", DataType.FLOAT, 1, 2);
    SDVariable w = sd.var("w", Nd4j.rand(DataType.FLOAT, 3, 2));
    SDVariable b = sd.var("b", Nd4j.rand(DataType.FLOAT, 1, 2));
    SDVariable sm = sd.nn.softmax("predictions", in.mmul("matmul", w).add("addbias", b));
    SDVariable loss = sd.loss.logLoss("loss", label, sm);

    INDArray i = Nd4j.rand(DataType.FLOAT, 1, 3);
    INDArray l = Nd4j.rand(DataType.FLOAT, 1, 2);

    File dir = testDir.newFolder();
    File f = new File(dir, "test.json");
    ProfilingListener listener = ProfilingListener.builder(f)
            .recordAll()
            .warmup(5)
            .build();

    sd.setListeners(listener);

    Map<String,INDArray> ph = new HashMap<>();
    ph.put("in", i);

    for( int x=0; x<10; x++ ) {
        sd.outputSingle(ph, "predictions");
    }

    String content = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
    assertFalse(content.isEmpty());

    //Should be 2 begins and 2 ends for each entry:
    //5 warmup iterations, 5 profile iterations, x2 for both the op name and the op "instance" name
    String[] opNames = {"mmul", "add", "softmax"};
    for(String s : opNames){
        assertEquals(s, 10, StringUtils.countMatches(content, s));
    }

    System.out.println("///////////////////////////////////////////");
    ProfileAnalyzer.summarizeProfile(f, ProfileAnalyzer.ProfileFormat.SAMEDIFF);
}
 
Example 16
Source File: ValidationUtilTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testValidateSameDiff() throws Exception {
    Nd4j.setDataType(DataType.FLOAT);

    File f = testDir.newFolder();
    SameDiff sd = SameDiff.create();
    SDVariable v = sd.placeHolder("x", DataType.FLOAT, 3, 4);
    SDVariable loss = v.std(true);

    File fOrig = new File(f, "sd_fb.fb");
    sd.asFlatFile(fOrig);

    //Test non-existent file:
    File fNonExistent = new File("doesntExist.fb");
    ValidationResult vr0 = Nd4jValidator.validateSameDiffFlatBuffers(fNonExistent);
    assertFalse(vr0.isValid());
    assertEquals("SameDiff FlatBuffers file", vr0.getFormatType());
    assertTrue(vr0.getIssues().get(0), vr0.getIssues().get(0).contains("exist"));

    //Test empty file:
    File fEmpty = new File(f, "empty.fb");
    fEmpty.createNewFile();
    assertTrue(fEmpty.exists());
    ValidationResult vr1 = Nd4jValidator.validateSameDiffFlatBuffers(fEmpty);
    assertEquals("SameDiff FlatBuffers file", vr1.getFormatType());
    assertFalse(vr1.isValid());
    assertTrue(vr1.getIssues().get(0), vr1.getIssues().get(0).contains("empty"));

    //Test directory (not a file):
    File directory = new File(f, "dir");
    boolean created = directory.mkdir();
    assertTrue(created);
    ValidationResult vr2 = Nd4jValidator.validateSameDiffFlatBuffers(directory);
    assertEquals("SameDiff FlatBuffers file", vr2.getFormatType());
    assertFalse(vr2.isValid());
    assertTrue(vr2.getIssues().get(0), vr2.getIssues().get(0).contains("directory"));

    //Test non-FlatBuffers file:
    File fText = new File(f, "text.fb");
    FileUtils.writeStringToFile(fText, "Not a flatbuffers file :)", StandardCharsets.UTF_8);
    ValidationResult vr3 = Nd4jValidator.validateSameDiffFlatBuffers(fText);
    assertEquals("SameDiff FlatBuffers file", vr3.getFormatType());
    assertFalse(vr3.isValid());
    String s = vr3.getIssues().get(0);
    assertTrue(s, s.contains("FlatBuffers") && s.contains("SameDiff") && s.contains("corrupt"));

    //Test corrupted FlatBuffers file:
    byte[] fbBytes = FileUtils.readFileToByteArray(fOrig);
    for( int i=0; i<30; i++ ){
        fbBytes[i] = (byte)('a' + i);
    }
    File fCorrupt = new File(f, "corrupt.fb");
    FileUtils.writeByteArrayToFile(fCorrupt, fbBytes);

    ValidationResult vr4 = Nd4jValidator.validateSameDiffFlatBuffers(fCorrupt);
    assertEquals("SameDiff FlatBuffers file", vr4.getFormatType());
    assertFalse(vr4.isValid());
    s = vr4.getIssues().get(0);
    assertTrue(s, s.contains("FlatBuffers") && s.contains("SameDiff") && s.contains("corrupt"));

    //Test valid FlatBuffers file:
    ValidationResult vr5 = Nd4jValidator.validateSameDiffFlatBuffers(fOrig);
    assertEquals("SameDiff FlatBuffers file", vr5.getFormatType());
    assertTrue(vr5.isValid());
    assertNull(vr5.getIssues());
    assertNull(vr5.getException());
}
 
Example 17
Source File: JsonModelServerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSameDiffMnist() throws Exception {

    SameDiff sd = SameDiff.create();
    SDVariable in = sd.placeHolder("in", DataType.FLOAT, -1, 28*28);
    SDVariable w = sd.var("w", Nd4j.rand(DataType.FLOAT, 28*28, 10));
    SDVariable b = sd.var("b", Nd4j.rand(DataType.FLOAT, 1, 10));
    SDVariable sm = sd.nn.softmax("softmax", in.mmul(w).add(b), -1);

    val server = new JsonModelServer.Builder<float[], Integer>(sd)
            .outputSerializer( new IntSerde())
            .inputDeserializer(new FloatSerde())
            .inferenceAdapter(new InferenceAdapter<float[], Integer>() {
                @Override
                public MultiDataSet apply(float[] input) {
                    return new MultiDataSet(Nd4j.create(input, 1, input.length), null);
                }

                @Override
                public Integer apply(INDArray... nnOutput) {
                    return nnOutput[0].argMax().getInt(0);
                }
            })
            .orderedInputNodes("in")
            .orderedOutputNodes("softmax")
            .port(PORT+1)
            .build();

    val client = JsonRemoteInference.<float[], Integer>builder()
            .endpointAddress("http://localhost:" + (PORT+1) + "/v1/serving")
            .outputDeserializer(new IntSerde())
            .inputSerializer( new FloatSerde())
            .build();

    try{
        server.start();
        for( int i=0; i<10; i++ ){
            INDArray f = Nd4j.rand(DataType.FLOAT, 1, 28*28);
            INDArray exp = sd.output(Collections.singletonMap("in", f), "softmax").get("softmax");
            float[] fArr = f.toFloatVector();
            int out = client.predict(fArr);
            assertEquals(exp.argMax().getInt(0), out);
        }
    } finally {
        server.stop();
    }
}
 
Example 18
Source File: SameDiffRNNTestCases.java    From deeplearning4j with Apache License 2.0
@Override
public Object getConfiguration() throws Exception {
    Nd4j.getRandom().setSeed(12345);

    int miniBatchSize = 10;
    int numLabelClasses = 6;
    int nIn = 60;
    int numUnits = 7;
    int timeSteps = 3;

    SameDiff sd = SameDiff.create();

    SDVariable in = sd.placeHolder("in", DataType.FLOAT, miniBatchSize, timeSteps, nIn);
    SDVariable label = sd.placeHolder("label", DataType.FLOAT, miniBatchSize, numLabelClasses);

    SDVariable cLast = sd.var("cLast", Nd4j.zeros(DataType.FLOAT, miniBatchSize, numUnits));
    SDVariable yLast = sd.var("yLast", Nd4j.zeros(DataType.FLOAT, miniBatchSize, numUnits));

    LSTMLayerConfig c = LSTMLayerConfig.builder()
            .lstmdataformat(LSTMDataFormat.NTS)
            .directionMode(LSTMDirectionMode.FWD)
            .gateAct(LSTMActivations.SIGMOID)
            .cellAct(LSTMActivations.TANH)
            .outAct(LSTMActivations.TANH)
            .retFullSequence(true)
            .retLastC(true)
            .retLastH(true)
            .build();

    LSTMLayerOutputs outputs = new LSTMLayerOutputs(sd.rnn.lstmLayer(
            in, cLast, yLast, null,
            LSTMLayerWeights.builder()
                    .weights(sd.var("weights", Nd4j.rand(DataType.FLOAT, nIn, 4 * numUnits)))
                    .rWeights(sd.var("rWeights", Nd4j.rand(DataType.FLOAT, numUnits, 4 * numUnits)))
                    .peepholeWeights(sd.var("inputPeepholeWeights", Nd4j.rand(DataType.FLOAT, 3 * numUnits)))
                    .bias(sd.var("bias", Nd4j.rand(DataType.FLOAT, 4 * numUnits)))
                    .build(),
            c), c);

    //With default settings, 3d (time series) input with shape [miniBatchSize, vectorSize, timeSeriesLength]
    //gives 2d output with shape [miniBatchSize, vectorSize]
    SDVariable layer0 = outputs.getOutput();
    SDVariable layer1 = layer0.mean(1);

    SDVariable w1 = sd.var("w1", Nd4j.rand(DataType.FLOAT, numUnits, numLabelClasses));
    SDVariable b1 = sd.var("b1", Nd4j.rand(DataType.FLOAT, numLabelClasses));

    SDVariable out = sd.nn.softmax("out", layer1.mmul(w1).add(b1));
    SDVariable loss = sd.loss.logLoss("loss", label, out);

    //Also set the training configuration:
    sd.setTrainingConfig(TrainingConfig.builder()
            .updater(new Adam(5e-2))
            .l1(1e-3).l2(1e-3)
            .dataSetFeatureMapping("in")            //features[0] -> "in" placeholder
            .dataSetLabelMapping("label")           //labels[0]   -> "label" placeholder
            .build());

    return sd;
}
 
Example 19
Source File: TestSessions.java    From deeplearning4j with Apache License 2.0
@Test
public void testInferenceSessionBasic(){
    //So far: trivial test to check execution order

    SameDiff sd = SameDiff.create();

    SDVariable ph1 = sd.placeHolder("x", DataType.FLOAT, 3,4);
    SDVariable ph2 = sd.placeHolder("y", DataType.FLOAT, 1,4);

    SDVariable out = ph1.add("out", ph2);

    //NOTE: normally sessions are internal and completely hidden from users

    InferenceSession is = new InferenceSession(sd);

    INDArray x = Nd4j.linspace(1, 12, 12).castTo(DataType.FLOAT).reshape(3,4);
    INDArray y = Nd4j.linspace(0.1, 0.4, 4, DataType.DOUBLE).castTo(DataType.FLOAT).reshape(1,4);

    INDArray outExp = x.addRowVector(y);

    Map<String,INDArray> m = new HashMap<>();
    m.put("x", x);
    m.put("y", y);

    Map<String,INDArray> outMap = is.output(Collections.singletonList("out"), m, null,
            Collections.<String>emptyList(), null, At.defaultAt(Operation.TRAINING));

    assertEquals(1, outMap.size());
    assertEquals(outExp, outMap.get("out"));
}