org.apache.zeppelin.interpreter.InterpreterResult Java Examples

The following examples show how to use org.apache.zeppelin.interpreter.InterpreterResult. All of the examples are taken from the Apache Zeppelin project; the source file each one comes from is noted above its code.
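Before diving into the examples, here is a minimal sketch of the InterpreterResult API as it is exercised on this page: an interpreter builds a result from a status Code (SUCCESS, ERROR, ...), optionally a message Type (TEXT, TABLE, HTML, ANGULAR, ...), and a payload, and callers read the result back through code() and message(). The snippet below is assembled from the calls used in the examples that follow; it is an illustrative sketch, not taken from any single Zeppelin source file.

import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
import org.apache.zeppelin.interpreter.InterpreterResult.Type;
import org.apache.zeppelin.interpreter.InterpreterResultMessage;

// Illustrative helper (not from the Zeppelin code base): building and inspecting results.
public static void demoInterpreterResult() {
  // Build a successful result whose single message is plain text.
  InterpreterResult ok = new InterpreterResult(Code.SUCCESS, Type.TEXT, "50");

  // Build an error result that carries only an error message string.
  InterpreterResult failed = new InterpreterResult(Code.ERROR, "Syntax error in statement");
  System.out.println(failed.code());     // ERROR

  // Consumers check the status code, then read the list of result messages.
  if (ok.code() == Code.SUCCESS) {
    InterpreterResultMessage first = ok.message().get(0);
    System.out.println(first.getType()); // TEXT
    System.out.println(first.getData()); // 50
  }
}

Most of the tests below follow the same pattern: call interpret(...) on an interpreter, assert on result.code(), and then assert on the type and data of the individual InterpreterResultMessage entries.
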
Example #1
Source File: ElasticsearchInterpreterTest.java    From zeppelin with Apache License 2.0
@Theory
public void testCount(ElasticsearchInterpreter interpreter) {
  final InterpreterContext ctx = buildContext("testCount");

  InterpreterResult res = interpreter.interpret("count /unknown", ctx);
  assertEquals(Code.ERROR, res.code());

  res = interpreter.interpret("count /logs", ctx);
  assertEquals(Code.SUCCESS, res.code());
  assertEquals("50", res.message().get(0).getData());
  assertNotNull(ctx.getAngularObjectRegistry().get("count_testCount", null, null));
  assertEquals(50L, ctx.getAngularObjectRegistry().get("count_testCount", null, null).get());

  res = interpreter.interpret("count /logs { \"query\": { \"match\": { \"status\": 500 } } }",
          ctx);
  assertEquals(Code.SUCCESS, res.code());
}
 
Example #2
Source File: AlluxioInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void copyFromLocalLargeTest() throws IOException, AlluxioException {
  File testFile = new File(mLocalAlluxioCluster.getAlluxioHome() + "/testFile");
  testFile.createNewFile();
  FileOutputStream fos = new FileOutputStream(testFile);
  byte[] toWrite = BufferUtils.getIncreasingByteArray(SIZE_BYTES);
  fos.write(toWrite);
  fos.close();

  InterpreterResult output = alluxioInterpreter.interpret("copyFromLocal " +
          testFile.getAbsolutePath() + " /testFile", null);
  Assert.assertEquals(
          "Copied " + testFile.getAbsolutePath() + " to /testFile\n\n",
          output.message().get(0).getData());

  long fileLength = fs.getStatus(new AlluxioURI("/testFile")).getLength();
  Assert.assertEquals(SIZE_BYTES, fileLength);

  FileInStream fStream = fs.openFile(new AlluxioURI("/testFile"));
  byte[] read = new byte[SIZE_BYTES];
  fStream.read(read);
  Assert.assertTrue(BufferUtils.equalIncreasingByteArray(SIZE_BYTES, read));
}
 
Example #3
Source File: CassandraInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void should_extract_variable_from_statement() throws Exception {
  //Given
  AngularObjectRegistry angularObjectRegistry = new AngularObjectRegistry("cassandra", null);
  GUI gui = new GUI();
  gui.textbox("login", "hsue");
  gui.textbox("age", "27");
  InterpreterContext intrContext = InterpreterContext.builder()
          .setParagraphTitle("Paragraph1")
          .setAngularObjectRegistry(angularObjectRegistry)
          .setGUI(gui)
          .build();

  String queries = "@prepare[test_insert_with_variable]=" +
          "INSERT INTO zeppelin.users(login,firstname,lastname,age) VALUES(?,?,?,?)\n" +
          "@bind[test_insert_with_variable]='{{login=hsue}}','Helen','SUE',{{age=27}}\n" +
          "SELECT firstname,lastname,age FROM zeppelin.users WHERE login='hsue';";
  //When
  final InterpreterResult actual = interpreter.interpret(queries, intrContext);

  //Then
  assertThat(actual.code()).isEqualTo(Code.SUCCESS);
  assertThat(actual.message().get(0).getData()).isEqualTo("firstname\tlastname\tage\n" +
          "Helen\tSUE\t27\n");
}
 
Example #4
Source File: ShinyInterpreter.java    From zeppelin with Apache License 2.0
@Override
public InterpreterResult internalInterpret(String st, InterpreterContext context)
        throws InterpreterException {
  String shinyApp = context.getStringLocalProperty("app", DEFAULT_APP_NAME);
  String shinyType = context.getStringLocalProperty("type", "");
  IRInterpreter irInterpreter = getIRInterpreter(shinyApp);
  if (StringUtils.isBlank(shinyType)) {
    return irInterpreter.internalInterpret(st, context);
  } else if (shinyType.equals("run")) {
    try {
      return irInterpreter.runShinyApp(context);
    } catch (IOException e) {
      throw new InterpreterException(e);
    }
  } else if (shinyType.equals("ui")) {
    return irInterpreter.shinyUI(st, context);
  } else if (shinyType.equals("server")) {
    return irInterpreter.shinyServer(st, context);
  } else {
    throw new InterpreterException("Unknown shiny type: " + shinyType);
  }
}
 
Example #5
Source File: JDBCInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void testStatementPrecode() throws IOException, InterpreterException {
  Properties properties = new Properties();
  properties.setProperty("default.driver", "org.h2.Driver");
  properties.setProperty("default.url", getJdbcConnection());
  properties.setProperty("default.user", "");
  properties.setProperty("default.password", "");
  properties.setProperty(DEFAULT_STATEMENT_PRECODE, "set @v='statement'");
  JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties);
  jdbcInterpreter.open();

  String sqlQuery = "select @v";

  InterpreterResult interpreterResult = jdbcInterpreter.interpret(sqlQuery, context);

  assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
  List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
  assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
  assertEquals("@V\nstatement\n", resultMessages.get(0).getData());
}
 
Example #6
Source File: FlexmarkParserTest.java    From zeppelin with Apache License 2.0
@Test
public void testHeader() {
  InterpreterResult r1 = md.interpret("# H1", null);
  assertEquals(wrapWithMarkdownClassDiv("<h1>H1</h1>\n"), r1.message().get(0).getData());

  InterpreterResult r2 = md.interpret("## H2", null);
  assertEquals(wrapWithMarkdownClassDiv("<h2>H2</h2>\n"), r2.message().get(0).getData());

  InterpreterResult r3 = md.interpret("### H3", null);
  assertEquals(wrapWithMarkdownClassDiv("<h3>H3</h3>\n"), r3.message().get(0).getData());

  InterpreterResult r4 = md.interpret("#### H4", null);
  assertEquals(wrapWithMarkdownClassDiv("<h4>H4</h4>\n"), r4.message().get(0).getData());

  InterpreterResult r5 = md.interpret("##### H5", null);
  assertEquals(wrapWithMarkdownClassDiv("<h5>H5</h5>\n"), r5.message().get(0).getData());

  InterpreterResult r6 = md.interpret("###### H6", null);
  assertEquals(wrapWithMarkdownClassDiv("<h6>H6</h6>\n"), r6.message().get(0).getData());

  InterpreterResult r7 = md.interpret("Alt-H1\n" + "======", null);
  assertEquals(wrapWithMarkdownClassDiv("<h1>Alt-H1</h1>\n"), r7.message().get(0).getData());

  InterpreterResult r8 = md.interpret("Alt-H2\n" + "------", null);
  assertEquals(wrapWithMarkdownClassDiv("<h2>Alt-H2</h2>\n"), r8.message().get(0).getData());
}
 
Example #7
Source File: PySparkInterpreterMatplotlibTest.java    From zeppelin with Apache License 2.0
/**
 * This code is mainly copied from RemoteInterpreterServer.java which
 * normally handles this in real use cases.
 */
@Override
public InterpreterResult interpret(String st, InterpreterContext context) throws InterpreterException {
  context.out.clear();
  InterpreterResult result = super.interpret(st, context);
  List<InterpreterResultMessage> resultMessages = null;
  try {
    context.out.flush();
    resultMessages = context.out.toInterpreterResultMessage();
  } catch (IOException e) {
    e.printStackTrace();
  }
  resultMessages.addAll(result.message());

  return new InterpreterResult(result.code(), resultMessages);
}
 
Example #8
Source File: FlinkInterpreter.java    From zeppelin with Apache License 2.0
@Override
public InterpreterResult interpret(String st, InterpreterContext context)
    throws InterpreterException {
  LOGGER.debug("Interpret code: " + st);
  this.z.setInterpreterContext(context);
  this.z.setGui(context.getGui());
  this.z.setNoteGui(context.getNoteGui());

  // set ClassLoader of current Thread to be the ClassLoader of Flink scala-shell,
  // otherwise codegen will fail to find classes defined in scala-shell
  ClassLoader originClassLoader = Thread.currentThread().getContextClassLoader();
  try {
    Thread.currentThread().setContextClassLoader(getFlinkScalaShellLoader());
    createPlannerAgain();
    setParallelismIfNecessary(context);
    setSavePointIfNecessary(context);
    return innerIntp.interpret(st, context);
  } finally {
    Thread.currentThread().setContextClassLoader(originClassLoader);
  }
}
 
Example #9
Source File: JDBCInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void testIncorrectStatementPrecode() throws IOException,
        InterpreterException {
  Properties properties = new Properties();
  properties.setProperty("default.driver", "org.h2.Driver");
  properties.setProperty("default.url", getJdbcConnection());
  properties.setProperty("default.user", "");
  properties.setProperty("default.password", "");
  properties.setProperty(DEFAULT_STATEMENT_PRECODE, "set incorrect");
  JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties);
  jdbcInterpreter.open();

  String sqlQuery = "select 1";

  InterpreterResult interpreterResult = jdbcInterpreter.interpret(sqlQuery, context);

  assertEquals(InterpreterResult.Code.ERROR, interpreterResult.code());
  assertEquals(InterpreterResult.Type.TEXT, interpreterResult.message().get(0).getType());
  assertTrue(interpreterResult.toString(),
          interpreterResult.message().get(0).getData().contains("Syntax error"));
}
 
Example #10
Source File: RemoteAngularObjectTest.java    From zeppelin with Apache License 2.0
@Test
public void testAngularObjectRemovalOnZeppelinServerSide() throws InterruptedException, InterpreterException {
  // test if angular object removal on the server side propagates to the interpreter
  // process's registry. This will happen when a notebook is removed.

  InterpreterResult ret = intp.interpret("get", context);
  Thread.sleep(500); // waitFor eventpoller pool event
  String[] result = ret.message().get(0).getData().split(" ");
  assertEquals("0", result[0]); // size of registry

  // create object
  ret = intp.interpret("add n1 v1", context);
  Thread.sleep(500);
  result = ret.message().get(0).getData().split(" ");
  assertEquals("1", result[0]); // size of registry
  assertEquals("v1", localRegistry.get("n1", "note", null).get());

  // remove object in local registry.
  localRegistry.removeAndNotifyRemoteProcess("n1", "note", null);
  ret = intp.interpret("get", context);
  Thread.sleep(500); // waitFor eventpoller pool event
  result = ret.message().get(0).getData().split(" ");
  assertEquals("0", result[0]); // size of registry
}
 
Example #11
Source File: JavaInterpreter.java    From zeppelin with Apache License 2.0
@Override
public InterpreterResult interpret(String code, InterpreterContext context) {

  // choose a new name for the class containing the main method
  String generatedClassName = "C" + UUID.randomUUID().toString().replace("-", "");

  try {
    String res = StaticRepl.execute(generatedClassName, code);
    return new InterpreterResult(InterpreterResult.Code.SUCCESS, res);
  } catch (Exception e) {
    logger.error("Exception in Interpreter while interpret", e);
    return new InterpreterResult(InterpreterResult.Code.ERROR, e.getMessage());

  }

}
 
Example #12
Source File: RemoteInterpreterOutputTestStream.java    From zeppelin with Apache License 2.0
@Test
public void testOutputType() throws InterpreterException {
  RemoteInterpreter intp = (RemoteInterpreter) interpreterSetting.getInterpreter("user1", "note1", "mock_stream");

  InterpreterResult ret = intp.interpret("SUCCESS:%html hello:", createInterpreterContext());
  assertEquals(InterpreterResult.Type.HTML, ret.message().get(0).getType());
  assertEquals("hello", ret.message().get(0).getData());

  ret = intp.interpret("SUCCESS:%html\nhello:", createInterpreterContext());
  assertEquals(InterpreterResult.Type.HTML, ret.message().get(0).getType());
  assertEquals("hello", ret.message().get(0).getData());

  ret = intp.interpret("SUCCESS:%html hello:%angular world", createInterpreterContext());
  assertEquals(InterpreterResult.Type.HTML, ret.message().get(0).getType());
  assertEquals("hello", ret.message().get(0).getData());
  assertEquals(InterpreterResult.Type.ANGULAR, ret.message().get(1).getType());
  assertEquals("world", ret.message().get(1).getData());
}
 
Example #13
Source File: FlinkStreamSqlInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void testResumeStreamSqlFromInvalidSavePointPath() throws IOException, InterpreterException, InterruptedException, TimeoutException {
  String initStreamScalaScript = getInitStreamScript(1000);
  InterpreterResult result = flinkInterpreter.interpret(initStreamScalaScript,
          getInterpreterContext());
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());

  File savepointPath = FileUtils.getTempDirectory();
  InterpreterContext context = getInterpreterContext();
  context.getLocalProperties().put("type", "update");
  context.getLocalProperties().put("savepointPath", savepointPath.getAbsolutePath());
  context.getLocalProperties().put("parallelism", "1");
  context.getLocalProperties().put("maxParallelism", "10");
  InterpreterResult result2 = sqlInterpreter.interpret("select url, count(1) as pv from " +
          "log group by url", context);

  // due to the invalid savepointPath, the job submission fails and an exception is thrown
  assertEquals(InterpreterResult.Code.ERROR, result2.code());
  List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
  assertTrue(resultMessages.toString().contains("Failed to submit job."));

}
 
Example #14
Source File: HDFSFileInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void testMaxLength() {
  HDFSFileInterpreter t = new MockHDFSFileInterpreter(new Properties());
  t.open();
  InterpreterResult result = t.interpret("ls -l /", null);
  String lineSeparator = "\n";
  int fileStatusLength = MockFileSystem.FILE_STATUSES.split(lineSeparator).length;
  assertEquals(result.message().get(0).getData().split(lineSeparator).length, fileStatusLength);
  t.close();

  Properties properties = new Properties();
  final int maxLength = fileStatusLength - 2;
  properties.setProperty("hdfs.maxlength", String.valueOf(maxLength));
  HDFSFileInterpreter t1 = new MockHDFSFileInterpreter(properties);
  t1.open();
  InterpreterResult result1 = t1.interpret("ls -l /", null);
  assertEquals(result1.message().get(0).getData().split(lineSeparator).length, maxLength);
  t1.close();
}
 
Example #15
Source File: RemoteAngularObjectTest.java    From zeppelin with Apache License 2.0
@Test
public void testAngularObjectAddOnZeppelinServerSide() throws InterruptedException, InterpreterException {
  // test if an angular object added on the server side propagates to the interpreter
  // process's registry. This will happen when the zeppelin server loads a notebook and
  // restores the object into the registry.

  InterpreterResult ret = intp.interpret("get", context);
  Thread.sleep(500); // waitFor eventpoller pool event
  String[] result = ret.message().get(0).getData().split(" ");
  assertEquals("0", result[0]); // size of registry

  // create object
  localRegistry.addAndNotifyRemoteProcess("n1", "v1", "note", null);

  // get from remote registry
  ret = intp.interpret("get", context);
  Thread.sleep(500); // waitFor eventpoller pool event
  result = ret.message().get(0).getData().split(" ");
  assertEquals("1", result[0]); // size of registry
}
 
Example #16
Source File: ElasticsearchInterpreter.java    From zeppelin with Apache License 2.0
/**
 * Processes a "count" request.
 *
 * @param urlItems Items of the URL
 * @param data May contain the JSON of the request
 * @param interpreterContext Instance of the context
 * @return Result of the count request; it contains the total hits
 */
private InterpreterResult processCount(String[] urlItems, String data,
    InterpreterContext interpreterContext) {
  if (urlItems.length > 2) {
    return new InterpreterResult(InterpreterResult.Code.ERROR,
        "Bad URL (it should be /index1,index2,.../type1,type2,...)");
  }

  final ActionResponse response = searchData(urlItems, data, 0);

  addAngularObject(interpreterContext, "count", response.getTotalHits());

  return new InterpreterResult(
      InterpreterResult.Code.SUCCESS,
      InterpreterResult.Type.TEXT,
      "" + response.getTotalHits());
}
 
Example #17
Source File: RemoteInterpreterServer.java    From zeppelin with Apache License 2.0
private RemoteInterpreterResult convert(InterpreterResult result,
                                        Map<String, Object> config, GUI gui, GUI noteGui) {

  List<RemoteInterpreterResultMessage> msg = new LinkedList<>();
  for (InterpreterResultMessage m : result.message()) {
    msg.add(new RemoteInterpreterResultMessage(
        m.getType().name(),
        m.getData()));
  }

  return new RemoteInterpreterResult(
      result.code().name(),
      msg,
      gson.toJson(config),
      gui.toJson(),
      noteGui.toJson());
}
 
Example #18
Source File: SparkInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void testDisableSparkUI_1() throws InterpreterException {
  Properties properties = new Properties();
  properties.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local");
  properties.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "test");
  properties.setProperty("zeppelin.spark.maxResult", "100");
  properties.setProperty("spark.ui.enabled", "false");
  // disable color output for easy testing
  properties.setProperty("zeppelin.spark.scala.color", "false");
  properties.setProperty("zeppelin.spark.deprecatedMsg.show", "false");

  interpreter = new SparkInterpreter(properties);
  interpreter.setInterpreterGroup(mock(InterpreterGroup.class));
  InterpreterContext.set(getInterpreterContext());
  interpreter.open();

  InterpreterContext context = getInterpreterContext();
  InterpreterResult result = interpreter.interpret("sc.range(1, 10).sum", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());

  // spark job url is not sent
  verify(mockRemoteEventClient, never()).onParaInfosReceived(any(Map.class));
}
 
Example #19
Source File: JavaInterpreterUtilsTest.java    From zeppelin with Apache License 2.0
@Test
public void testStaticReplWithDisplayTableFromSimpleMapUtilReturnTableType() {

    StringWriter writer = new StringWriter();
    PrintWriter out = new PrintWriter(writer);
    out.println("import java.util.HashMap;");
    out.println("import java.util.Map;");
    out.println("import org.apache.zeppelin.java.JavaInterpreterUtils;");
    out.println("public class HelloWorld {");
    out.println("  public static void main(String args[]) {");
    out.println("    Map<String, Long> counts = new HashMap<>();");
    out.println("    counts.put(\"hello\",4L);");
    out.println("    counts.put(\"world\",5L);");
    out.println("    System.out.println(JavaInterpreterUtils.displayTableFromSimpleMap(\"Word\", \"Count\", counts));");
    out.println("  }");
    out.println("}");
    out.close();

    InterpreterResult res = java.interpret(writer.toString(), context);

    assertEquals(InterpreterResult.Code.SUCCESS, res.code());
    assertEquals(InterpreterResult.Type.TABLE, res.message().get(0).getType());
}
 
Example #20
Source File: IRInterpreter.java    From zeppelin with Apache License 2.0
public InterpreterResult runShinyApp(InterpreterContext context)
        throws IOException, InterpreterException {
  // redirect the R kernel process to the InterpreterOutput of the current paragraph,
  // because error messages after the shiny app is launched are printed in the R kernel process
  getKernelProcessLauncher().setRedirectedContext(context);
  try {
    StringBuilder builder = new StringBuilder("library(shiny)\n");
    String host = RemoteInterpreterUtils.findAvailableHostAddress();
    int port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();
    builder.append("runApp(appDir='" + shinyAppFolder.getAbsolutePath() + "', " +
            "port=" + port + ", host='" + host + "', launch.browser=FALSE)");
    // the shiny app will launch and block here until the user cancels the paragraph.
    LOGGER.info("Run shiny app code: " + builder.toString());
    return internalInterpret(builder.toString(), context);
  } finally {
    getKernelProcessLauncher().setRedirectedContext(null);
  }
}
 
Example #21
Source File: NotebookTest.java    From zeppelin with Apache License 2.0
@Test
public void testInvalidInterpreter() throws IOException, InterruptedException {
  Note note = notebook.createNote("note1", anonymous);
  Paragraph p1 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
  p1.setText("%invalid abc");
  p1.setAuthenticationInfo(anonymous);
  note.run(p1.getId());

  Thread.sleep(2 * 1000);
  assertEquals(p1.getStatus(), Status.ERROR);
  InterpreterResult result = p1.getReturn();
  assertEquals(InterpreterResult.Code.ERROR, result.code());
  assertEquals("Interpreter invalid not found", result.message().get(0).getData());
  assertNull(p1.getDateStarted());
  notebook.removeNote(note, anonymous);
}
 
Example #22
Source File: Neo4jCypherInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void testPoint() {
  InterpreterResult result = interpreter.interpret("RETURN point({ x:3, y:0 }) AS cartesian_2d,"
          + "point({ x:0, y:4, z:1 }) AS cartesian_3d,"
          + "point({ latitude: 12, longitude: 56 }) AS geo_2d,"
          + "point({ latitude: 12, longitude: 56, height: 1000 }) AS geo_3d", context);
  assertEquals(Code.SUCCESS, result.code());
  assertEquals("cartesian_2d\tcartesian_3d\tgeo_2d\tgeo_3d\n" +
          "Point{srid=7203, x=3.0, y=0.0}\tPoint{srid=9157, x=0.0, y=4.0, z=1.0}\t" +
          "Point{srid=4326, x=56.0, y=12.0}\tPoint{srid=4979, x=56.0, y=12.0, z=1000.0}\n",
          result.toString().replace(TABLE_RESULT_PREFIX, StringUtils.EMPTY));

  result = interpreter.interpret(
          "WITH point({ latitude: 12, longitude: 56, height: 1000 }) AS geo_3d " +
          "RETURN geo_3d.latitude AS latitude, geo_3d.longitude AS longitude, " +
                  "geo_3d.height AS height", context);
  assertEquals(Code.SUCCESS, result.code());
  assertEquals("latitude\tlongitude\theight\n" +
                  "12.0\t56.0\t1000.0\n",
          result.toString().replace(TABLE_RESULT_PREFIX, StringUtils.EMPTY));
}
 
Example #23
Source File: KotlinInterpreter.java    From zeppelin with Apache License 2.0
@Override
public InterpreterResult interpret(String code,
                                   InterpreterContext context) throws InterpreterException{
  // save the job's running thread so it can be cancelled later
  Job<?> runningJob = getRunningJob(context.getParagraphId());
  if (runningJob != null) {
    runningJob.info().put("CURRENT_THREAD", Thread.currentThread());
  }

  return runWithOutput(code, context.out);
}
 
Example #24
Source File: PegdownParserTest.java    From zeppelin with Apache License 2.0
@Test
public void testStrikethrough() {
  InterpreterResult result = md.interpret("This is ~~deleted~~ text", null);
  assertEquals(
      wrapWithMarkdownClassDiv("<p>This is <del>deleted</del> text</p>"),
          result.message().get(0).getData());
}
 
Example #25
Source File: KotlinInterpreter.java    From zeppelin with Apache License 2.0
/**
 * Kotlin interpreter uses System.out for printing, so it is redirected to InterpreterOutput.
 * Note that Scala's Console class needs separate output redirection
 */
private InterpreterResult runWithOutput(String code, InterpreterOutput out) {
  this.out.setInterpreterOutput(out);

  PrintStream oldOut = System.out;
  PrintStream newOut = (out != null) ? new PrintStream(out) : null;
  try {
    System.setOut(newOut);
    return interpreter.eval(code);
  } finally {
    System.setOut(oldOut);
  }
}
 
Example #26
Source File: CassandraInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void should_describe_udt_withing_logged_in_keyspace() throws Exception {
  //Given
  String query = "USE live_data;\n" +
          "DESCRIBE TYPE address;";
  final String expected = reformatHtml(readTestResource(
          "/scalate/DescribeType_live_data_address_within_current_keyspace.html"));

  //When
  final InterpreterResult actual = interpreter.interpret(query, intrContext);

  //Then
  assertThat(actual.code()).isEqualTo(Code.SUCCESS);
  assertThat(reformatHtml(actual.message().get(0).getData())).isEqualTo(expected);
}
 
Example #27
Source File: LivySparkSQLInterpreter.java    From zeppelin with Apache License 2.0
@Override
public void open() throws InterpreterException {
  this.sparkInterpreter = getInterpreterInTheSameSessionByClassName(LivySparkInterpreter.class);
  // As we don't know whether the livy server uses spark2 or spark1, we detect SparkSession
  // to judge whether it is using spark2.
  try {
    InterpreterContext context = InterpreterContext.builder()
        .setInterpreterOut(new InterpreterOutput(null))
        .build();
    InterpreterResult result = sparkInterpreter.interpret("spark", context);
    if (result.code() == InterpreterResult.Code.SUCCESS &&
        result.message().get(0).getData().contains("org.apache.spark.sql.SparkSession")) {
      LOGGER.info("SparkSession is detected so we are using spark 2.x for session {}",
          sparkInterpreter.getSessionInfo().id);
      isSpark2 = true;
    } else {
      // spark 1.x
      result = sparkInterpreter.interpret("sqlContext", context);
      if (result.code() == InterpreterResult.Code.SUCCESS) {
        LOGGER.info("sqlContext is detected.");
      } else if (result.code() == InterpreterResult.Code.ERROR) {
        // create a SQLContext if it is not available (in livy 0.2, sqlContext
        // is not available).
        LOGGER.info("sqlContext is not detected, try to create SQLContext by ourselves");
        result = sparkInterpreter.interpret(
            "val sqlContext = new org.apache.spark.sql.SQLContext(sc)\n"
                + "import sqlContext.implicits._", context);
        if (result.code() == InterpreterResult.Code.ERROR) {
          throw new LivyException("Fail to create SQLContext," +
              result.message().get(0).getData());
        }
      }
    }
  } catch (LivyException e) {
    throw new RuntimeException("Fail to Detect SparkVersion", e);
  }
}
 
Example #28
Source File: IgniteInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void testInterpret() {
  String sizeVal = "size";

  InterpreterResult result = intp.interpret("import org.apache.ignite.IgniteCache\n" +
          "val " + sizeVal + " = ignite.cluster().nodes().size()", INTP_CONTEXT);

  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  assertTrue(result.message().get(0).getData().contains(sizeVal + ": Int = " +
          ignite.cluster().nodes().size()));

  result = intp.interpret("\"123\"\n  .toInt", INTP_CONTEXT);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
}
 
Example #29
Source File: NotebookRestApi.java    From zeppelin with Apache License 2.0
/**
 * REST API to run a paragraph synchronously.
 *
 * @param noteId      - noteId
 * @param paragraphId - paragraphId
 * @param message     - JSON with params if the user wants to update the dynamic form's value;
 *                    null, empty string, or empty json if the user doesn't want to update
 * @return JSON with status.OK
 * @throws IOException
 * @throws IllegalArgumentException
 */
@POST
@Path("run/{noteId}/{paragraphId}")
@ZeppelinApi
public Response runParagraphSynchronously(@PathParam("noteId") String noteId,
                                          @PathParam("paragraphId") String paragraphId,
                                          String message)
    throws IOException, IllegalArgumentException {
  LOG.info("run paragraph synchronously {} {} {}", noteId, paragraphId, message);

  Note note = notebook.getNote(noteId);
  checkIfNoteIsNotNull(note);
  Paragraph paragraph = note.getParagraph(paragraphId);
  checkIfParagraphIsNotNull(paragraph);

  Map<String, Object> params = new HashMap<>();
  if (!StringUtils.isEmpty(message)) {
    ParametersRequest request =
        ParametersRequest.fromJson(message);
    params = request.getParams();
  }

  if (notebookService.runParagraph(noteId, paragraphId, paragraph.getTitle(),
      paragraph.getText(), params,
      new HashMap<>(), false, true, getServiceContext(), new RestServiceCallback<>())) {
    note = notebookService.getNote(noteId, getServiceContext(), new RestServiceCallback<>());
    Paragraph p = note.getParagraph(paragraphId);
    InterpreterResult result = p.getReturn();
    if (result.code() == InterpreterResult.Code.SUCCESS) {
      return new JsonResponse<>(Status.OK, result).build();
    } else {
      return new JsonResponse<>(Status.INTERNAL_SERVER_ERROR, result).build();
    }
  } else {
    return new JsonResponse<>(Status.INTERNAL_SERVER_ERROR, "Fail to run paragraph").build();
  }
}
 
Example #30
Source File: CassandraInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void should_describe_all_tables() throws Exception {
  //Given
  String query = "DESCRIBE TABLES;";
  final String expected = reformatHtml(readTestResource(
          "/scalate/DescribeTables.html"));

  //When
  final InterpreterResult actual = interpreter.interpret(query, intrContext);

  //Then
  assertThat(actual.code()).isEqualTo(Code.SUCCESS);
  assertThat(reformatHtml(actual.message().get(0).getData())).isEqualTo(expected);
}