org.apache.zeppelin.interpreter.InterpreterContext Java Examples

The following examples show how to use org.apache.zeppelin.interpreter.InterpreterContext. They are taken from the Apache Zeppelin project (Apache License 2.0); each example's header names the source file it comes from.
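As a quick orientation before the examples, the snippet below is a minimal sketch (not taken verbatim from any single file) of the builder pattern that recurs throughout: an InterpreterContext is assembled via InterpreterContext.builder() and, where callers need it later, registered with InterpreterContext.set(...). The note and paragraph ids are placeholders.

InterpreterContext context = InterpreterContext.builder()
    .setNoteId("noteId")                             // note that owns the paragraph
    .setParagraphId("paragraphId")                   // paragraph being executed
    .setInterpreterOut(new InterpreterOutput(null))  // sink collecting interpreter output
    .build();
InterpreterContext.set(context);                     // make it retrievable via InterpreterContext.get()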
Example #1
Source File: FileSystemRecoveryStorageTest.java    From zeppelin with Apache License 2.0
@Test
public void testSingleInterpreterProcess() throws InterpreterException, IOException {
  InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test");
  interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED);

  Interpreter interpreter1 = interpreterSetting.getDefaultInterpreter("user1", "note1");
  RemoteInterpreter remoteInterpreter1 = (RemoteInterpreter) interpreter1;
  InterpreterContext context1 = InterpreterContext.builder()
      .setNoteId("noteId")
      .setParagraphId("paragraphId")
      .build();
  remoteInterpreter1.interpret("hello", context1);

  assertEquals(1, interpreterSettingManager.getRecoveryStorage().restore().size());

  interpreterSetting.close();
  assertEquals(0, interpreterSettingManager.getRecoveryStorage().restore().size());
}
 
Example #2
Source File: SubmarineShellInterpreter.java    From zeppelin with Apache License 2.0
@Override
public InterpreterResult internalInterpret(String cmd, InterpreterContext intpContext) {
  setParagraphConfig(intpContext);

  // The algorithm path & checkpoint path support replacing ${username} with the real user name
  String algorithmPath = properties.getProperty(SUBMARINE_ALGORITHM_HDFS_PATH, "");
  if (algorithmPath.contains(USERNAME_SYMBOL)) {
    algorithmPath = algorithmPath.replace(USERNAME_SYMBOL, userName);
    properties.setProperty(SUBMARINE_ALGORITHM_HDFS_PATH, algorithmPath);
  }
  String checkpointPath = properties.getProperty(
      TF_CHECKPOINT_PATH, "");
  if (checkpointPath.contains(USERNAME_SYMBOL)) {
    checkpointPath = checkpointPath.replace(USERNAME_SYMBOL, userName);
    properties.setProperty(TF_CHECKPOINT_PATH, checkpointPath);
  }

  return super.internalInterpret(cmd, intpContext);
}
 
Example #3
Source File: FlinkStreamSqlInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void testSingleStreamSql() throws IOException, InterpreterException {
  String initStreamScalaScript = getInitStreamScript(100);
  InterpreterContext context = getInterpreterContext();
  InterpreterResult result = flinkInterpreter.interpret(initStreamScalaScript, context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());

  context = getInterpreterContext();
  context.getLocalProperties().put("type", "single");
  context.getLocalProperties().put("template", "Total Count: {1} <br/> {0}");
  result = sqlInterpreter.interpret("select max(rowtime), count(1) " +
          "from log", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
  assertEquals(InterpreterResult.Type.ANGULAR, resultMessages.get(0).getType());
  assertTrue(resultMessages.toString(),
          resultMessages.get(0).getData().contains("Total Count"));
}
 
Example #4
Source File: RemoteAngularObjectTest.java    From zeppelin with Apache License 2.0
@Before
public void setUp() throws Exception {
  super.setUp();

  onAdd = new AtomicInteger(0);
  onUpdate = new AtomicInteger(0);
  onRemove = new AtomicInteger(0);

  interpreterSetting = interpreterSettingManager.getInterpreterSettingByName("test");
  intp = (RemoteInterpreter) interpreterSetting.getInterpreter("user1", "note1", "mock_ao");
  localRegistry = (RemoteAngularObjectRegistry) intp.getInterpreterGroup().getAngularObjectRegistry();

  context = InterpreterContext.builder()
      .setNoteId("note")
      .setParagraphId("id")
      .setAngularObjectRegistry(new AngularObjectRegistry(intp.getInterpreterGroup().getId(), null))
      .setResourcePool(new LocalResourcePool("pool1"))
      .build();

  intp.open();

}
 
Example #5
Source File: RemoteInterpreterServer.java    From zeppelin with Apache License 2.0
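// Rebuilds the InterpreterContext on the interpreter-process side from the Thrift-transferred
// RemoteInterpreterContext, wiring in the group's AngularObjectRegistry, ResourcePool,
// the event client and the paragraph output.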
private InterpreterContext convert(RemoteInterpreterContext ric, InterpreterOutput output) {
  return InterpreterContext.builder()
      .setNoteId(ric.getNoteId())
      .setNoteName(ric.getNoteName())
      .setParagraphId(ric.getParagraphId())
      .setReplName(ric.getReplName())
      .setParagraphTitle(ric.getParagraphTitle())
      .setParagraphText(ric.getParagraphText())
      .setLocalProperties(ric.getLocalProperties())
      .setAuthenticationInfo(AuthenticationInfo.fromJson(ric.getAuthenticationInfo()))
      .setGUI(GUI.fromJson(ric.getGui()))
      .setConfig(gson.fromJson(ric.getConfig(),
                 new TypeToken<Map<String, Object>>() {}.getType()))
      .setNoteGUI(GUI.fromJson(ric.getNoteGui()))
      .setAngularObjectRegistry(interpreterGroup.getAngularObjectRegistry())
      .setResourcePool(interpreterGroup.getResourcePool())
      .setInterpreterOut(output)
      .setIntpEventClient(intpEventClient)
      .setProgressMap(progressMap)
      .build();
}
 
Example #6
Source File: BigQueryInterpreter.java    From zeppelin with Apache License 2.0
@Override
public void cancel(InterpreterContext context) {
  logger.info("Trying to Cancel current query statement.");

  if (service != null && jobId != null && projectId != null) {
    try {
      Bigquery.Jobs.Cancel request = service.jobs().cancel(projectId, jobId);
      JobCancelResponse response = request.execute();
      jobId = null;
      logger.info("Query Execution cancelled");
    } catch (IOException ex) {
      logger.error("Could not cancel the SQL execution");
    }
  } else {
    logger.info("Query Execution was already cancelled");
  }
}
 
Example #7
Source File: SubmarineInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void testDashboard() throws InterpreterException {
  String script = "dashboard";
  InterpreterContext intpContext = getIntpContext();

  InterpreterResult interpreterResult = submarineIntp.interpret(script, intpContext);
  String message = interpreterResult.toJson();
  LOGGER.info(message);

  assertEquals(interpreterResult.code(), InterpreterResult.Code.SUCCESS);
  assertTrue(intpContext.out().size() >= 2);

  String dashboardTemplate = intpContext.out().getOutputAt(0).toString();
  LOGGER.info(dashboardTemplate);
  assertTrue("Did not generate template!", (dashboardTemplate.length() > 500));
}
 
Example #8
Source File: RemoteInterpreterServer.java    From zeppelin with Apache License 2.0
@Override
public void reconnect(String host, int port) throws TException {
  try {
    LOGGER.info("Reconnect to this interpreter process from {}:{}", host, port);
    this.intpEventServerHost = host;
    this.intpEventServerPort = port;
    intpEventClient = new RemoteInterpreterEventClient(intpEventServerHost, intpEventServerPort);
    intpEventClient.setIntpGroupId(interpreterGroupId);

    this.angularObjectRegistry = new AngularObjectRegistry(interpreterGroup.getId(), intpEventClient);
    this.resourcePool = new DistributedResourcePool(interpreterGroup.getId(), intpEventClient);

    // reset all the available InterpreterContext's components that use intpEventClient.
    for (InterpreterContext context : InterpreterContext.getAllContexts().values()) {
      context.setIntpEventClient(intpEventClient);
      context.setAngularObjectRegistry(angularObjectRegistry);
      context.setResourcePool(resourcePool);
    }
  } catch (Exception e) {
    throw new TException("Fail to reconnect", e);
  }
}
 
Example #9
Source File: JobManager.java    From zeppelin with Apache License 2.0
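// Publishes the paragraph's Flink web UI job URL (label "FLINK JOB") to the frontend through onParaInfosReceived.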
public void sendFlinkJobUrl(InterpreterContext context) {
  JobClient jobClient = jobs.get(context.getParagraphId());
  if (jobClient != null) {
    String jobUrl = null;
    if (replacedFlinkWebUrl != null) {
      jobUrl = replacedFlinkWebUrl + "#/job/" + jobClient.getJobID();
    } else {
      jobUrl = flinkWebUrl + "#/job/" + jobClient.getJobID();
    }
    Map<String, String> infos = new HashMap<>();
    infos.put("jobUrl", jobUrl);
    infos.put("label", "FLINK JOB");
    infos.put("tooltip", "View in Flink web UI");
    infos.put("noteId", context.getNoteId());
    infos.put("paraId", context.getParagraphId());
    context.getIntpEventClient().onParaInfosReceived(infos);
  } else {
    LOGGER.warn("No job is associated with paragraph: " + context.getParagraphId());
  }
}
 
Example #10
Source File: IPythonInterpreterTest.java    From zeppelin with Apache License 2.0
@Override
public void setUp() throws InterpreterException {
  Properties properties = initIntpProperties();
  startInterpreter(properties);

  InterpreterContext context = getInterpreterContext();
  InterpreterResult result = interpreter.interpret("import sys\nsys.version_info.major", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  try {
    List<InterpreterResultMessage> messages = context.out.toInterpreterResultMessage();
    if (messages.get(0).getData().equals("2")) {
      isPython2 = true;
    } else {
      isPython2 = false;
    }
  } catch (IOException e) {
    throw new InterpreterException(e);
  }

}
 
Example #11
Source File: SparkShimsTest.java    From zeppelin with Apache License 2.0
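// Falls back through the Spark 3.x, 2.x and 1.x shims until one can be instantiated on the test classpath.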
@Before
public void setUp() {
  mockContext = mock(InterpreterContext.class);
  mockIntpEventClient = mock(RemoteInterpreterEventClient.class);
  when(mockContext.getIntpEventClient()).thenReturn(mockIntpEventClient);
  doNothing().when(mockIntpEventClient).onParaInfosReceived(argumentCaptor.capture());

  try {
    sparkShims = SparkShims.getInstance(SparkVersion.SPARK_3_1_0.toString(), new Properties(), null);
  } catch (Throwable e1) {
    try {
      sparkShims = SparkShims.getInstance(SparkVersion.SPARK_2_0_0.toString(), new Properties(), null);
    } catch (Throwable e2) {
      try {
        sparkShims = SparkShims.getInstance(SparkVersion.SPARK_1_6_0.toString(), new Properties(), null);
      } catch (Throwable e3) {
        throw new RuntimeException("All SparkShims were tried, but none could be created.");
      }
    }
  }
}
 
Example #12
Source File: KSQLInterpreter.java    From zeppelin with Apache License 2.0
@Override
public InterpreterResult interpret(String query,
      InterpreterContext context) throws InterpreterException {
  if (StringUtils.isBlank(query)) {
    return new InterpreterResult(InterpreterResult.Code.SUCCESS);
  }
  interpreterOutput.setInterpreterOutput(context.out);
  try {
    interpreterOutput.getInterpreterOutput().flush();
    interpreterOutput.getInterpreterOutput().write("%table");
    interpreterOutput.getInterpreterOutput().write(NEW_LINE);
    Set<String> header = new LinkedHashSet<>();
    executeQuery(context.getParagraphId(), query.trim(), header);
    return new InterpreterResult(InterpreterResult.Code.SUCCESS);
  } catch (IOException e) {
    return new InterpreterResult(InterpreterResult.Code.ERROR, e.getMessage());
  }
}
 
Example #13
Source File: IPythonKernelTest.java    From zeppelin with Apache License 2.0
@Test
public void testUpdateOutput() throws IOException, InterpreterException {
  InterpreterContext context = getInterpreterContext();
  String st = "import sys\n" +
          "import time\n" +
          "from IPython.display import display, clear_output\n" +
          "for i in range(10):\n" +
          "    time.sleep(0.25)\n" +
          "    clear_output(wait=True)\n" +
          "    print(i)\n" +
          "    sys.stdout.flush()";
  InterpreterResult result = interpreter.interpret(st, context);
  List<InterpreterResultMessage> interpreterResultMessages =
          context.out.toInterpreterResultMessage();
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  assertEquals("9\n", interpreterResultMessages.get(0).getData());
}
 
Example #14
Source File: IPyFlinkInterpreterTest.java    From zeppelin with Apache License 2.0
protected InterpreterContext getInterpreterContext() {
  InterpreterContext context = InterpreterContext.builder()
          .setNoteId("noteId")
          .setParagraphId("paragraphId")
          .setInterpreterOut(new InterpreterOutput(null))
          .setAngularObjectRegistry(angularObjectRegistry)
          .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
          .build();
  InterpreterContext.set(context);
  return context;
}
 
Example #15
Source File: PythonInterpreter.java    From zeppelin with Apache License 2.0
@Override
public int getProgress(InterpreterContext context) throws InterpreterException {
  if (iPythonInterpreter != null) {
    return iPythonInterpreter.getProgress(context);
  }
  return 0;
}
 
Example #16
Source File: TerminalManager.java    From zeppelin with Apache License 2.0
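// Marks the paragraph's TERMINAL_SOCKET_STATUS angular object as closed and removes the associated terminal service.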
public void onWebSocketClose(TerminalSocket terminalSocket, String noteId, String paragraphId) {
  String id = formatId(noteId, paragraphId);
  InterpreterContext intpContext = noteParagraphId2IntpContext.get(id);
  if (null != intpContext) {
    intpContext.getAngularObjectRegistry().add(TERMINAL_SOCKET_STATUS, TERMINAL_SOCKET_CLOSE,
        intpContext.getNoteId(), intpContext.getParagraphId());
  } else {
    LOGGER.error("Cann't find InterpreterContext from : " + id);
    LOGGER.error(noteParagraphId2IntpContext.toString());
  }

  removeTerminalService(terminalSocket);
}
 
Example #17
Source File: UniverseInterpreter.java    From zeppelin with Apache License 2.0
@Override
public void cancel(InterpreterContext context) throws InterpreterException {
  try {
    client.closeSession(context.getParagraphId());
  } catch (Exception e) {
    logger.error("Error close SAP session", e );
  }
}
 
Example #18
Source File: BasePigInterpreter.java    From zeppelin with Apache License 2.0
@Override
public void cancel(InterpreterContext context) {
  LOGGER.info("Cancel paragraph:" + context.getParagraphId());
  PigScriptListener listener = listenerMap.get(context.getParagraphId());
  if (listener != null) {
    Set<String> jobIds = listener.getJobIds();
    if (jobIds.isEmpty()) {
      LOGGER.info("No job is started, so can not cancel paragraph:" + context.getParagraphId());
    }
    for (String jobId : jobIds) {
      LOGGER.info("Kill jobId:" + jobId);
      HExecutionEngine engine =
              (HExecutionEngine) getPigServer().getPigContext().getExecutionEngine();
      try {
        Field launcherField = HExecutionEngine.class.getDeclaredField("launcher");
        launcherField.setAccessible(true);
        Launcher launcher = (Launcher) launcherField.get(engine);
        // It doesn't work for Tez Engine due to PIG-5035
        launcher.killJob(jobId, new Configuration());
      } catch (NoSuchFieldException | BackendException | IllegalAccessException e) {
        LOGGER.error("Fail to cancel paragraph:" + context.getParagraphId(), e);
      }
    }
  } else {
    LOGGER.warn("No PigScriptListener found, can not cancel paragraph:"
            + context.getParagraphId());
  }
}
 
Example #19
Source File: SparkSqlInterpreterTest.java    From zeppelin with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
  Properties p = new Properties();
  p.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local[4]");
  p.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "test");
  p.setProperty("zeppelin.spark.maxResult", "10");
  p.setProperty("zeppelin.spark.concurrentSQL", "true");
  p.setProperty("zeppelin.spark.sql.stacktrace", "true");
  p.setProperty("zeppelin.spark.useHiveContext", "true");
  p.setProperty("zeppelin.spark.deprecatedMsg.show", "false");

  intpGroup = new InterpreterGroup();
  sparkInterpreter = new SparkInterpreter(p);
  sparkInterpreter.setInterpreterGroup(intpGroup);

  sqlInterpreter = new SparkSqlInterpreter(p);
  sqlInterpreter.setInterpreterGroup(intpGroup);
  intpGroup.put("session_1", new LinkedList<Interpreter>());
  intpGroup.get("session_1").add(sparkInterpreter);
  intpGroup.get("session_1").add(sqlInterpreter);

  context = InterpreterContext.builder()
      .setNoteId("noteId")
      .setParagraphId("paragraphId")
      .setParagraphTitle("title")
      .setAngularObjectRegistry(new AngularObjectRegistry(intpGroup.getId(), null))
      .setResourcePool(new LocalResourcePool("id"))
      .setInterpreterOut(new InterpreterOutput(null))
      .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
      .build();
  InterpreterContext.set(context);

  sparkInterpreter.open();
  sqlInterpreter.open();
}
 
Example #20
Source File: HazelcastJetInterpreterUtilsTest.java    From zeppelin with Apache License 2.0
@BeforeClass
public static void setUp() {
  Properties p = new Properties();
  jet = new HazelcastJetInterpreter(p);
  jet.open();
  context = InterpreterContext.builder().build();
}
 
Example #21
Source File: FlinkSqlInterrpeter.java    From zeppelin with Apache License 2.0
private void callDropTable(String sql, InterpreterContext context) throws IOException {
  try {
    lock.lock();
    this.tbenv.sqlUpdate(sql);
  } finally {
    if (lock.isHeldByCurrentThread()) {
      lock.unlock();
    }
  }
  context.out.write("Table has been dropped.\n");
}
 
Example #22
Source File: IRInterpreterTest.java    From zeppelin with Apache License 2.0
@Override
protected InterpreterContext getInterpreterContext() {
  InterpreterContext context = InterpreterContext.builder()
          .setNoteId("note_1")
          .setParagraphId("paragraph_1")
          .setInterpreterOut(new InterpreterOutput(null))
          .setLocalProperties(new HashMap<>())
          .build();
  return context;
}
 
Example #23
Source File: FlinkSqlInterrpeter.java    From zeppelin with Apache License 2.0
private void callAlterDatabase(String sql, InterpreterContext context) throws IOException {
  try {
    lock.lock();
    this.tbenv.sqlUpdate(sql);
  } finally {
    if (lock.isHeldByCurrentThread()) {
      lock.unlock();
    }
  }
  context.out.write("Database has been modified.\n");
}
 
Example #24
Source File: PythonInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void testFailtoLaunchPythonProcess() throws InterpreterException {
  tearDown();

  intpGroup = new InterpreterGroup();

  Properties properties = new Properties();
  properties.setProperty("zeppelin.python", "invalid_python");
  properties.setProperty("zeppelin.python.useIPython", "false");
  properties.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1");

  interpreter = new LazyOpenInterpreter(new PythonInterpreter(properties));

  intpGroup.put("note", new LinkedList<Interpreter>());
  intpGroup.get("note").add(interpreter);
  interpreter.setInterpreterGroup(intpGroup);

  InterpreterContext.set(getInterpreterContext());

  try {
    interpreter.interpret("1+1", getInterpreterContext());
    fail("Should fail to open PythonInterpreter");
  } catch (InterpreterException e) {
    String stacktrace = ExceptionUtils.getStackTrace(e);
    assertTrue(stacktrace, stacktrace.contains("No such file or directory"));
  }
}
 
Example #25
Source File: SparkInterpreterTest.java    From zeppelin with Apache License 2.0
@Test
public void testScopedMode() throws InterpreterException {
  Properties properties = new Properties();
  properties.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local");
  properties.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "test");
  properties.setProperty("zeppelin.spark.maxResult", "100");
  // disable color output for easy testing
  properties.setProperty("zeppelin.spark.scala.color", "false");
  properties.setProperty("zeppelin.spark.deprecatedMsg.show", "false");

  SparkInterpreter interpreter1 = new SparkInterpreter(properties);
  SparkInterpreter interpreter2 = new SparkInterpreter(properties);

  InterpreterGroup interpreterGroup = new InterpreterGroup();
  interpreter1.setInterpreterGroup(interpreterGroup);
  interpreter2.setInterpreterGroup(interpreterGroup);

  interpreterGroup.addInterpreterToSession(interpreter1, "session_1");
  interpreterGroup.addInterpreterToSession(interpreter2, "session_2");

  InterpreterContext.set(getInterpreterContext());
  interpreter1.open();
  interpreter2.open();

  InterpreterContext context = getInterpreterContext();

  InterpreterResult result1 = interpreter1.interpret("sc.range(1, 10).sum", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result1.code());

  InterpreterResult result2 = interpreter2.interpret("sc.range(1, 10).sum", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result2.code());

  // interpreter2 continue to work after interpreter1 is closed
  interpreter1.close();

  result2 = interpreter2.interpret("sc.range(1, 10).sum", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result2.code());
  interpreter2.close();
}
 
Example #26
Source File: IPyFlinkInterpreterTest.java    From zeppelin with Apache License 2.0
private static InterpreterContext createInterpreterContext() {
  InterpreterContext context = InterpreterContext.builder()
          .setNoteId("noteId")
          .setParagraphId("paragraphId")
          .setInterpreterOut(new InterpreterOutput(null))
          .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
          .setAngularObjectRegistry(angularObjectRegistry)
          .build();
  InterpreterContext.set(context);
  return context;
}
 
Example #27
Source File: Paragraph.java    From zeppelin with Apache License 2.0
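// Builds the InterpreterContext for running this paragraph; the AngularObjectRegistry and ResourcePool
// come from the bound interpreter group, and the authenticated subject gets the user's stored credentials.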
private InterpreterContext getInterpreterContext() {
  AngularObjectRegistry registry = null;
  ResourcePool resourcePool = null;
  String replName = null;
  if (this.interpreter != null) {
    registry = this.interpreter.getInterpreterGroup().getAngularObjectRegistry();
    resourcePool = this.interpreter.getInterpreterGroup().getResourcePool();
    InterpreterSetting interpreterSetting = ((ManagedInterpreterGroup)
            interpreter.getInterpreterGroup()).getInterpreterSetting();
    replName = interpreterSetting.getName();
  }

  Credentials credentials = note.getCredentials();
  if (subject != null) {
    UserCredentials userCredentials;
    try {
      userCredentials = credentials.getUserCredentials(subject.getUser());
    } catch (IOException e) {
      LOGGER.warn("Unable to get Usercredentials. Working with empty UserCredentials", e);
      userCredentials = new UserCredentials();
    }
    subject.setUserCredentials(userCredentials);
  }

  return InterpreterContext.builder()
          .setNoteId(note.getId())
          .setNoteName(note.getName())
          .setParagraphId(getId())
          .setReplName(replName)
          .setParagraphTitle(title)
          .setParagraphText(text)
          .setAuthenticationInfo(subject)
          .setLocalProperties(localProperties)
          .setConfig(config)
          .setGUI(settings)
          .setNoteGUI(getNoteGui())
          .setAngularObjectRegistry(registry)
          .setResourcePool(resourcePool)
          .build();
}
 
Example #28
Source File: FlinkInterpreterTest.java    From zeppelin with Apache License 2.0
private InterpreterContext getInterpreterContext() {
  InterpreterContext context = InterpreterContext.builder()
          .setParagraphId("paragraphId")
          .setInterpreterOut(new InterpreterOutput(null))
          .setAngularObjectRegistry(angularObjectRegistry)
          .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
          .build();
  InterpreterContext.set(context);
  return context;
}
 
Example #29
Source File: JDBCInterpreterTest.java    From zeppelin with Apache License 2.0
private InterpreterContext getInterpreterContext() {
  return InterpreterContext.builder()
          .setAuthenticationInfo(new AuthenticationInfo("testUser"))
          .setParagraphId("paragraphId")
          .setInterpreterOut(new InterpreterOutput(null))
          .build();
}
 
Example #30
Source File: Spark1Shims.java    From zeppelin with Apache License 2.0
public void setupSparkListener(final String master,
                               final String sparkWebUrl,
                               final InterpreterContext context) {
  SparkContext sc = SparkContext.getOrCreate();
  sc.addSparkListener(new JobProgressListener(sc.getConf()) {
    @Override
    public void onJobStart(SparkListenerJobStart jobStart) {
      if (sc.getConf().getBoolean("spark.ui.enabled", true) &&
          !Boolean.parseBoolean(properties.getProperty("zeppelin.spark.ui.hidden", "false"))) {
        buildSparkJobUrl(master, sparkWebUrl, jobStart.jobId(), jobStart.properties(), context);
      }
    }
  });
}