org.springframework.shell.core.CommandResult Java Examples

The following examples show how to use org.springframework.shell.core.CommandResult. Each example is taken from an open-source project; its source file, project, and license are noted above the snippet.
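Before the project-specific examples, here is a minimal sketch of the pattern they all follow: execute a command string against a Spring Shell 1.x JLineShellComponent and inspect the returned CommandResult. The plain Bootstrap setup and the built-in "version" command are assumptions for illustration only; the tests below obtain their shell through project helpers such as getShell() or getTestShell().

import org.springframework.shell.Bootstrap;
import org.springframework.shell.core.CommandResult;
import org.springframework.shell.core.JLineShellComponent;

public class CommandResultSketch {

  public static void main(String[] args) {
    // Start an embedded Spring Shell 1.x shell (assumption: the default
    // Bootstrap classpath configuration is sufficient for this sketch).
    Bootstrap bootstrap = new Bootstrap();
    JLineShellComponent shell = bootstrap.getJLineShellComponent();

    // Execute a command line exactly as a user would type it at the prompt.
    CommandResult cr = shell.executeCommand("version");

    // The three accessors the examples below rely on:
    //   isSuccess()    - did the command parse and run without error?
    //   getResult()    - the object returned by the command method (often a String or Table)
    //   getException() - the exception, if the command failed
    if (cr.isSuccess()) {
      System.out.println("Output: " + cr.getResult());
    } else if (cr.getException() != null) {
      cr.getException().printStackTrace();
    }
  }
}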
Example #1
Source File: ITTestHDFSParquetImportCommand.java    From hudi with Apache License 2.0
/**
 * Test case for 'hdfsparquetimport' with insert.
 */
@Test
public void testConvertWithInsert() throws IOException {
  String command = String.format("hdfsparquetimport --srcPath %s --targetPath %s --tableName %s "
      + "--tableType %s --rowKeyField %s" + " --partitionPathField %s --parallelism %s "
      + "--schemaFilePath %s --format %s --sparkMemory %s --retry %s --sparkMaster %s",
      sourcePath.toString(), targetPath.toString(), tableName, HoodieTableType.COPY_ON_WRITE.name(),
      "_row_key", "timestamp", "1", schemaFile, "parquet", "2G", "1", "local");
  CommandResult cr = getShell().executeCommand(command);

  assertAll("Command run success",
      () -> assertTrue(cr.isSuccess()),
      () -> assertEquals("Table imported to hoodie format", cr.getResult().toString()));

  // Check that the hudi table exists
  String metaPath = targetPath + File.separator + HoodieTableMetaClient.METAFOLDER_NAME;
  assertTrue(Files.exists(Paths.get(metaPath)), "Hoodie table not exist.");

  // Load meta data
  new TableCommand().connect(targetPath.toString(), TimelineLayoutVersion.VERSION_1, false, 2000, 300000, 7);
  metaClient = HoodieCLI.getTableMetaClient();

  assertEquals(1, metaClient.getActiveTimeline().getCommitsTimeline().countInstants(), "Should only 1 commit.");

  verifyResultData(insertData);
}
 
Example #2
Source File: TestSavepointsCommand.java    From hudi with Apache License 2.0
/**
 * Test case of command 'savepoints show'.
 */
@Test
public void testShowSavepoints() throws IOException {
  // generate four savepoints
  for (int i = 100; i < 104; i++) {
    String instantTime = String.valueOf(i);
    HoodieTestDataGenerator.createSavepointFile(tablePath, instantTime, jsc.hadoopConfiguration());
  }

  CommandResult cr = getShell().executeCommand("savepoints show");
  assertTrue(cr.isSuccess());

  // generate the expected result
  String[][] rows = Arrays.asList("100", "101", "102", "103").stream().sorted(Comparator.reverseOrder())
      .map(instant -> new String[]{instant}).toArray(String[][]::new);
  String expected = HoodiePrintHelper.print(new String[] {HoodieTableHeaderFields.HEADER_SAVEPOINT_TIME}, rows);
  expected = removeNonWordAndStripSpace(expected);
  String got = removeNonWordAndStripSpace(cr.getResult().toString());
  assertEquals(expected, got);
}
 
Example #3
Source File: TestHoodieLogFileCommand.java    From hudi with Apache License 2.0
/**
 * Test case for 'show logfile metadata'.
 */
@Test
public void testShowLogFileCommits() throws JsonProcessingException {
  CommandResult cr = getShell().executeCommand("show logfile metadata --logFilePathPattern " + partitionPath + "/*");
  assertTrue(cr.isSuccess());

  TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_INSTANT_TIME)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_RECORD_COUNT)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_BLOCK_TYPE)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_HEADER_METADATA)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_FOOTER_METADATA);

  // construct the expected result; there is only 1 row.
  List<Comparable[]> rows = new ArrayList<>();
  ObjectMapper objectMapper = new ObjectMapper();
  String headerStr = objectMapper.writeValueAsString(dataBlock.getLogBlockHeader());
  String footerStr = objectMapper.writeValueAsString(dataBlock.getLogBlockFooter());
  Comparable[] output = new Comparable[]{INSTANT_TIME, 100, dataBlock.getBlockType(), headerStr, footerStr};
  rows.add(output);

  String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows);
  expected = removeNonWordAndStripSpace(expected);
  String got = removeNonWordAndStripSpace(cr.getResult().toString());
  assertEquals(expected, got);
}
 
Example #4
Source File: TestCommitsCommand.java    From hudi with Apache License 2.0
/**
 * Test case of 'commits sync' command.
 */
@Test
public void testSyncCommits() throws IOException {
  Map<String, Integer[]> data = generateData();

  String tableName2 = "test_table2";
  String tablePath2 = basePath + File.separator + tableName2;
  HoodieTestUtils.init(jsc.hadoopConfiguration(), tablePath2, getTableType(), tableName2);

  data.remove("102");
  data.forEach((key, value) -> {
    HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath2, key, jsc.hadoopConfiguration(),
        Option.of(value[0]), Option.of(value[1]));
  });

  CommandResult cr = getShell().executeCommand(String.format("commits sync --path %s", tablePath2));
  assertTrue(cr.isSuccess());

  String expected = String.format("Load sync state between %s and %s", tableName, tableName2);
  assertEquals(expected, cr.getResult().toString());
}
 
Example #5
Source File: TestCommitsCommand.java    From hudi with Apache License 2.0
/**
 * Test case of 'commits compare' command.
 */
@Test
public void testCompareCommits() throws IOException {
  Map<String, Integer[]> data = generateData();

  String tableName2 = "test_table2";
  String tablePath2 = basePath + File.separator + tableName2;
  HoodieTestUtils.init(jsc.hadoopConfiguration(), tablePath2, getTableType());

  data.remove("102");
  data.forEach((key, value) -> {
    HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath2, key, jsc.hadoopConfiguration(),
        Option.of(value[0]), Option.of(value[1]));
  });

  CommandResult cr = getShell().executeCommand(String.format("commits compare --path %s", tablePath2));
  assertTrue(cr.isSuccess());

  // the latest instant of test_table2 is 101
  List<String> commitsToCatchup = metaClient.getActiveTimeline().findInstantsAfter("101", Integer.MAX_VALUE)
      .getInstants().map(HoodieInstant::getTimestamp).collect(Collectors.toList());
  String expected = String.format("Source %s is ahead by %d commits. Commits to catch up - %s",
      tableName, commitsToCatchup.size(), commitsToCatchup);
  assertEquals(expected, cr.getResult().toString());
}
 
Example #6
Source File: MongoRyaShellIT.java    From rya with Apache License 2.0
@Test
public void printConnectionDetails_connectedToMongo_noAuths() throws IOException {
    final JLineShellComponent shell = getTestShell();

    // Connect to the Mongo instance.
    final String cmd =
            RyaConnectionCommands.CONNECT_MONGO_CMD + " " +
                    "--hostname " + super.getMongoHostname() + " " +
                    "--port " + super.getMongoPort();
    shell.executeCommand(cmd);

    // Run the print connection details command.
    final CommandResult printResult = shell.executeCommand( RyaConnectionCommands.PRINT_CONNECTION_DETAILS_CMD );
    final String msg = (String) printResult.getResult();

    final String expected =
            "The shell is connected to an instance of MongoDB using the following parameters:\n" +
            "    Hostname: " + super.getMongoHostname() + "\n" +
            "    Port: " + super.getMongoPort() + "\n";
    assertEquals(expected, msg);
}
 
Example #7
Source File: AccumuloRyaConnectionCommandsIT.java    From rya with Apache License 2.0
@Test
public void connectAccumulo() throws IOException {
    final MiniAccumuloCluster cluster = getCluster();
    final Bootstrap bootstrap = getTestBootstrap();
    final JLineShellComponent shell = getTestShell();

    // Mock the user entering the correct password.
    final ApplicationContext context = bootstrap.getApplicationContext();
    final PasswordPrompt mockPrompt = context.getBean( PasswordPrompt.class );
    when(mockPrompt.getPassword()).thenReturn("password".toCharArray());

    // Execute the connect command.
    final String cmd =
            RyaConnectionCommands.CONNECT_ACCUMULO_CMD + " " +
                    "--username root " +
                    "--instanceName " + cluster.getInstanceName() + " "+
                    "--zookeepers " + cluster.getZooKeepers();

    final CommandResult connectResult = shell.executeCommand(cmd);

    // Ensure the connection was successful.
    assertTrue( connectResult.isSuccess() );
}
 
Example #8
Source File: TaskCommandTests.java    From spring-cloud-dataflow with Apache License 2.0
@Test
public void testTaskExecutionList() {
	logger.info("Retrieve Task Execution List Test");
	CommandResult cr = task().taskExecutionList();
	assertTrue("task execution list command must be successful", cr.isSuccess());
	Table table = (Table) cr.getResult();
	assertEquals("Number of columns returned was not expected", 5, table.getModel().getColumnCount());
	verifyTableValue(table, 0, 0, "Task Name");
	verifyTableValue(table, 0, 1, "ID");
	verifyTableValue(table, 0, 2, "Start Time");
	verifyTableValue(table, 0, 3, "End Time");
	verifyTableValue(table, 0, 4, "Exit Code");

	verifyTableValue(table, 1, 0, TASK_NAME);
	verifyTableValue(table, 1, 1, TASK_EXECUTION_ID);
	verifyTableValue(table, 1, 2, startTime);
	verifyTableValue(table, 1, 3, endTime);
	verifyTableValue(table, 1, 4, EXIT_CODE);
}
 
Example #9
Source File: AccumuloRyaConnectionCommandsIT.java    From rya with Apache License 2.0
@Test
public void connectToInstance_instanceDoesNotExist() throws IOException {
    final MiniAccumuloCluster cluster = getCluster();
    final Bootstrap bootstrap = getTestBootstrap();
    final JLineShellComponent shell = getTestShell();

    // Mock the user entering the correct password.
    final ApplicationContext context = bootstrap.getApplicationContext();
    final PasswordPrompt mockPrompt = context.getBean( PasswordPrompt.class );
    when(mockPrompt.getPassword()).thenReturn("password".toCharArray());

    // Connect to the mini accumulo instance.
    String cmd =
            RyaConnectionCommands.CONNECT_ACCUMULO_CMD + " " +
                    "--username root " +
                    "--instanceName " + cluster.getInstanceName() + " "+
                    "--zookeepers " + cluster.getZooKeepers();
    shell.executeCommand(cmd);

    // Try to connect to a non-existing instance.
    cmd = RyaConnectionCommands.CONNECT_INSTANCE_CMD + " --instance doesNotExist";
    final CommandResult result = shell.executeCommand(cmd);
    assertFalse( result.isSuccess() );
}
 
Example #10
Source File: TestTableCommand.java    From hudi with Apache License 2.0
/**
 * Test Case for connect table.
 */
@Test
public void testConnectTable() {
  // Prepare table
  assertTrue(prepareTable());

  // Test connect with specified values
  CommandResult cr = getShell().executeCommand(
      "connect --path " + tablePath + " --initialCheckIntervalMs 3000 "
          + "--maxWaitIntervalMs 40000 --maxCheckIntervalMs 8");
  assertTrue(cr.isSuccess());

  // Check specified values
  ConsistencyGuardConfig conf = HoodieCLI.consistencyGuardConfig;
  assertEquals(3000, conf.getInitialConsistencyCheckIntervalMs());
  assertEquals(40000, conf.getMaxConsistencyCheckIntervalMs());
  assertEquals(8, conf.getMaxConsistencyChecks());

  // Check default values
  assertFalse(conf.isConsistencyCheckEnabled());
  assertEquals(new Integer(1), HoodieCLI.layoutVersion.getVersion());
}
 
Example #11
Source File: StreamCommandTemplate.java    From spring-cloud-dataflow with Apache License 2.0
private void doCreate(String streamname, String streamdefinition, boolean deploy, Object... values) {
	String actualDefinition = String.format(streamdefinition, values);
	// Shell parser expects quotes to be escaped by \
	String wholeCommand = String.format("stream create --name \"%s\" --definition \"%s\" --deploy %s", streamname,
			actualDefinition.replaceAll("\"", "\\\\\""), deploy);
	CommandResult cr = shell.executeCommand(wholeCommand);
	// todo: Add deployment and verifier
	// if (deploy) {
	// stateVerifier.waitForDeploy(streamname);
	// }
	// else {
	// stateVerifier.waitForCreate(streamname);
	// }
	// add the stream name to the streams list before assertion
	streams.add(streamname);
	String deployMsg = "Created new stream '" + streamname + "'";
	if (deploy) {
		deployMsg += "\nDeployment request has been sent";
	}
	assertEquals(deployMsg, cr.getResult());

	verifyExists(streamname, actualDefinition, deploy);
}
 
Example #12
Source File: ITTestSavepointsCommand.java    From hudi with Apache License 2.0
/**
 * Test case of command 'savepoint create'.
 */
@Test
public void testSavepoint() {
  // generate four commits
  for (int i = 100; i < 104; i++) {
    String instantTime = String.valueOf(i);
    HoodieTestDataGenerator.createCommitFile(tablePath, instantTime, jsc.hadoopConfiguration());
  }

  String savepoint = "102";
  CommandResult cr = getShell().executeCommand(
      String.format("savepoint create --commit %s --sparkMaster %s", savepoint, "local"));

  assertAll("Command run failed",
      () -> assertTrue(cr.isSuccess()),
      () -> assertEquals(
          String.format("The commit \"%s\" has been savepointed.", savepoint), cr.getResult().toString()));

  // there is 1 savepoint instant
  HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline();
  assertEquals(1, timeline.getSavePointTimeline().countInstants());
}
 
Example #13
Source File: TaskCommandTemplate.java    From spring-cloud-dataflow with Apache License 2.0
/**
 * Destroy all tasks.
 *
 */
public void destroyAllTasks() {
	CommandResult cr = shell.executeCommand("task all destroy --force");
	// stateVerifier.waitForDestroy(task);
	assertTrue("Failure to destroy all tasks. CommandResult = " + cr.toString(), cr.isSuccess());
	tasks.clear();
}
 
Example #14
Source File: AbstractShellIntegrationTest.java    From spring-cloud-dataflow with Apache License 2.0
@Override
public CommandResult executeCommand(String command) {
	CommandResult cr = this.shell.executeCommand(command);
	if (cr.getException() != null) {
		cr.getException().printStackTrace();
	}
	Assert.isTrue(cr.isSuccess(), "Failure.  CommandResult = " + cr.toString());
	return cr;
}
 
Example #15
Source File: TaskCommandTests.java    From spring-cloud-dataflow with Apache License 2.0
@Test
public void testViewExecution() {
	logger.info("Retrieve Task Execution Status by Id");

	CommandResult idResult = task().taskExecutionList();
	Table result = (Table) idResult.getResult();
	long value = (long) result.getModel().getValue(findRowForExecutionId(result, TASK_EXECUTION_ID), 1);
	logger.info("Looking up id " + value);
	CommandResult cr = task().taskExecutionStatus(value);
	assertTrue("task execution status command must be successful", cr.isSuccess());
	Table table = (Table) cr.getResult();
	assertEquals("Number of columns returned was not expected", 2, table.getModel().getColumnCount());
	verifyTableValue(table, 0, 0, "Key ");
	verifyTableValue(table, 1, 0, "Id ");
	verifyTableValue(table, 2, 0, "Resource URL ");
	verifyTableValue(table, 3, 0, "Name ");
	verifyTableValue(table, 4, 0, "CLI Arguments ");
	verifyTableValue(table, 5, 0, "App Arguments ");
	verifyTableValue(table, 6, 0, "Deployment Properties ");
	verifyTableValue(table, 7, 0, "Job Execution Ids ");
	verifyTableValue(table, 8, 0, "Start Time ");
	verifyTableValue(table, 9, 0, "End Time ");
	verifyTableValue(table, 10, 0, "Exit Code ");
	verifyTableValue(table, 11, 0, "Exit Message ");
	verifyTableValue(table, 12, 0, "Error Message ");
	verifyTableValue(table, 13, 0, "External Execution Id ");

	verifyTableValue(table, 1, 1, TASK_EXECUTION_ID);
	verifyTableValue(table, 3, 1, TASK_NAME);
	verifyTableValue(table, 8, 1, startTime);
	verifyTableValue(table, 9, 1, endTime);
	verifyTableValue(table, 10, 1, EXIT_CODE);
	verifyTableValue(table, 11, 1, EXIT_MESSAGE);
	verifyTableValue(table, 12, 1, ERROR_MESSAGE);
	verifyTableValue(table, 13, 1, EXTERNAL_EXECUTION_ID);
}
 
Example #16
Source File: TaskCommandTests.java    From spring-cloud-dataflow with Apache License 2.0
@Test
public void testCurrentExecutions() {
	CommandResult cr = task().taskExecutionCurrent();
	Table table = (Table) cr.getResult();
	assertEquals("Number of columns returned was not expected", 4, table.getModel().getColumnCount());
	verifyTableValue(table, 0, 0, "Platform Name");
	verifyTableValue(table, 0, 1, "Platform Type");
	verifyTableValue(table, 0, 2, "Execution Count");
	verifyTableValue(table, 0, 3, "Maximum Executions");

	verifyTableValue(table, 1, 0, "default");
	verifyTableValue(table, 1, 1, "Local");
	verifyTableValue(table, 1, 3, 20);
}
 
Example #17
Source File: TaskCommandTests.java    From spring-cloud-dataflow with Apache License 2.0
@Test
public void testPlatformList() {
	CommandResult cr = task().taskPlatformList();
	Table table = (Table) cr.getResult();
	assertEquals("Number of columns returned was not expected", 3, table.getModel().getColumnCount());
	assertEquals("First Row First Value should be: Platform Name", "Platform Name", table.getModel().getValue(0, 0));
	assertEquals("First Row Second Value should be: Platform Type", "Platform Type", table.getModel().getValue(0, 1));
	assertEquals("First Row Second Value should be: Description", "Description", table.getModel().getValue(0, 2));
	assertEquals("Second Row First Value should be: default", "default", table.getModel().getValue(1, 0));
	assertEquals("Second Row Second Value should be: Local", "Local", table.getModel().getValue(1, 1));
}
 
Example #18
Source File: StreamCommandTemplate.java    From spring-cloud-dataflow with Apache License 2.0
/**
 * Deploy the given stream
 *
 * @param streamname name of the stream
 */
public void deploy(String streamname) {
	CommandResult cr = shell.executeCommand("stream deploy --name " + streamname);
	// stateVerifier.waitForDeploy(streamname);
	assertTrue("Failure.  CommandResult = " + cr.toString(), cr.isSuccess());
	assertEquals("Deployed stream '" + streamname + "'", cr.getResult());
}
 
Example #19
Source File: MongoRyaShellIT.java    From rya with Apache License 2.0
@Test
public void connectToInstance_noAuths() throws IOException {
    final Bootstrap bootstrap = getTestBootstrap();
    final JLineShellComponent shell = getTestShell();

    // Connect to the Mongo instance.
    String cmd =
            RyaConnectionCommands.CONNECT_MONGO_CMD + " " +
                    "--hostname " + super.getMongoHostname() + " " +
                    "--port " + super.getMongoPort();
    shell.executeCommand(cmd);

    // Install an instance of rya.
    final String instanceName = "testInstance";
    final InstallConfiguration installConf = InstallConfiguration.builder().build();

    final ApplicationContext context = bootstrap.getApplicationContext();
    final InstallPrompt installPrompt = context.getBean( InstallPrompt.class );
    when(installPrompt.promptInstanceName()).thenReturn("testInstance");
    when(installPrompt.promptInstallConfiguration("testInstance")).thenReturn( installConf );
    when(installPrompt.promptVerified(instanceName, installConf)).thenReturn(true);

    CommandResult result = shell.executeCommand( RyaAdminCommands.INSTALL_CMD );
    assertTrue( result.isSuccess() );

    // Connect to the instance that was just installed.
    cmd = RyaConnectionCommands.CONNECT_INSTANCE_CMD + " --instance " + instanceName;
    result = shell.executeCommand(cmd);
    assertTrue( result.isSuccess() );

    // Verify the shell state indicates it is connected to an instance.
    final SharedShellState sharedState = context.getBean( SharedShellState.class );
    final ShellState state = sharedState.getShellState();
    assertEquals(ConnectionState.CONNECTED_TO_INSTANCE, state.getConnectionState());
}
 
Example #20
Source File: StreamCommandTemplate.java    From spring-cloud-dataflow with Apache License 2.0
/**
 * Destroy all streams that were created using the 'create' method. Commonly called in
 * a @After annotated method
 */
public void destroyCreatedStreams() {
	for (int s = streams.size() - 1; s >= 0; s--) {
		String streamname = streams.get(s);
		CommandResult cr = shell.executeCommand("stream destroy --name " + streamname);
		// stateVerifier.waitForDestroy(streamname);
		assertTrue("Failure to destroy stream " + streamname + ".  CommandResult = " + cr.toString(),
				cr.isSuccess());
	}
}
 
Example #21
Source File: TaskCommandTests.java    From spring-cloud-dataflow with Apache License 2.0
@Test
public void testTaskExecutionListByName() {
	logger.info("Retrieve Task Execution List By Name Test");
	task().create("mytask", "timestamp");
	CommandResult cr = task().taskExecutionListByName("mytask");
	assertTrue("task execution list by name command must be successful", cr.isSuccess());
	Table table = (Table) cr.getResult();
	assertEquals("Number of columns returned was not expected", 5, table.getModel().getColumnCount());

	verifyTableValue(table,0, 0, "Task Name");
	verifyTableValue(table,0, 1, "ID");
	verifyTableValue(table,0, 2, "Start Time");
	verifyTableValue(table,0, 3, "End Time");
	verifyTableValue(table,0, 4, "Exit Code");
}
 
Example #22
Source File: StreamCommandTest.java    From Decision with Apache License 2.0
@Test
public void dropStreamTest() throws StratioEngineStatusException,
        StratioAPISecurityException, StratioAPIGenericException, StratioEngineOperationException, IOException,
        StratioEngineConnectionException {

    CommandResult cr2 = shell.executeCommand("drop --stream testStream");

    assertEquals(true, cr2.isSuccess());
    assertEquals("Stream testStream dropped correctly", cr2.getResult());

}
 
Example #23
Source File: StreamCommandTest.java    From Decision with Apache License 2.0
@Test
public void columnsStreamTest() throws StratioEngineStatusException,
        StratioAPISecurityException, StratioAPIGenericException, StratioEngineOperationException, IOException,
        StratioEngineConnectionException {

    String streamName = "testStream";
    List<ColumnNameTypeValue> values = new ArrayList<>();
    values.add(new ColumnNameTypeValue("column1", ColumnType.STRING, null));
    values.add(new ColumnNameTypeValue("column2", ColumnType.INTEGER, null));
    values.add(new ColumnNameTypeValue("column3", ColumnType.BOOLEAN, null));

    List<StreamQuery> querys = new ArrayList<>();
    querys.add(new StreamQuery("queryIdTest", "query raw test string"));

    Set<StreamAction> activeActions = new HashSet<>();
    activeActions.add(StreamAction.SAVE_TO_ELASTICSEARCH);

    List<StratioStream> streams = new ArrayList<>();
    StratioStream stream = new StratioStream(streamName, values, querys, activeActions, true);
    streams.add(stream);

    Mockito.when(ssaw.api().columnsFromStream("testStream")).thenReturn(values);

    CommandResult cr2 = shell.executeCommand("columns --stream testStream");

    assertEquals(true, cr2.isSuccess());
    assertEquals(getListResultFromName("columnsStream"), cr2.getResult());

}
 
Example #24
Source File: TaskCommandTemplate.java    From spring-cloud-dataflow with Apache License 2.0
/**
 * Launch a task and validate the result from shell.
 *
 * @param taskName the name of the task
 */
public long launch(String taskName) {
	// add the task name to the tasks list before assertion
	tasks.add(taskName);
	CommandResult cr = shell.executeCommand("task launch " + taskName);
	CommandResult idResult = shell.executeCommand("task execution list --name " + taskName);
	Table result = (Table) idResult.getResult();

	long value = (long) result.getModel().getValue(1, 1);
	assertTrue(cr.toString().contains("with execution id " + value));
	return value;
}
 
Example #25
Source File: MongoRyaShellIT.java    From rya with Apache License 2.0
@Test
public void printConnectionDetails_connectedToMongo_auths() throws IOException {
    final Bootstrap bootstrap = getTestBootstrap();
    final JLineShellComponent shell = getTestShell();

    // Mock the user entering the correct password.
    final ApplicationContext context = bootstrap.getApplicationContext();
    final PasswordPrompt mockPrompt = context.getBean( PasswordPrompt.class );
    when(mockPrompt.getPassword()).thenReturn("password".toCharArray());

    // Connect to the Mongo instance.
    final String cmd =
            RyaConnectionCommands.CONNECT_MONGO_CMD + " " +
                    "--hostname " + super.getMongoHostname() + " " +
                    "--port " + super.getMongoPort() + " " +
                    "--username bob";
    shell.executeCommand(cmd);

    // Run the print connection details command.
    final CommandResult printResult = shell.executeCommand( RyaConnectionCommands.PRINT_CONNECTION_DETAILS_CMD );
    final String msg = (String) printResult.getResult();

    final String expected =
            "The shell is connected to an instance of MongoDB using the following parameters:\n" +
            "    Hostname: " + super.getMongoHostname() + "\n" +
            "    Port: " + super.getMongoPort() + "\n" +
            "    Username: bob\n";
    assertEquals(expected, msg);
}
 
Example #26
Source File: ITTestSavepointsCommand.java    From hudi with Apache License 2.0
/**
 * Test case of command 'savepoint rollback'.
 */
@Test
public void testRollbackToSavepoint() throws IOException {
  // generate four commits
  for (int i = 100; i < 104; i++) {
    String instantTime = String.valueOf(i);
    HoodieTestDataGenerator.createCommitFile(tablePath, instantTime, jsc.hadoopConfiguration());
  }

  // generate one savepoint
  String savepoint = "102";
  HoodieTestDataGenerator.createSavepointFile(tablePath, savepoint, jsc.hadoopConfiguration());

  CommandResult cr = getShell().executeCommand(
      String.format("savepoint rollback --savepoint %s --sparkMaster %s", savepoint, "local"));

  assertAll("Command run failed",
      () -> assertTrue(cr.isSuccess()),
      () -> assertEquals(
          String.format("Savepoint \"%s\" rolled back", savepoint), cr.getResult().toString()));

  // there is 1 restore instant
  HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline();
  assertEquals(1, timeline.getRestoreTimeline().countInstants());

  // instant 103 has been rolled back
  assertFalse(timeline.getCommitTimeline().containsInstant(
      new HoodieInstant(HoodieInstant.State.COMPLETED, "commit", "103")));
}
 
Example #27
Source File: ActionCommandTest.java    From Decision with Apache License 2.0
@Test
public void saveSolrStreamStartTest() throws StratioEngineStatusException, StratioAPIGenericException,
        IOException,
        StratioEngineConnectionException, StratioEngineOperationException {
    Mockito.when(ssaw.api().listStreams()).thenReturn(new ArrayList<StratioStream>());
    CommandResult cr = shell.executeCommand("save solr start --stream testStream");
    assertEquals(true, cr.isSuccess());
    assertEquals("Stream testStream attached to solr correctly", cr.getResult());
}
 
Example #28
Source File: TaskCommandTemplate.java    From spring-cloud-dataflow with Apache License 2.0
private long launchTaskExecutionForLog(String taskName) throws Exception{
	// add the task name to the tasks list before assertion
	tasks.add(taskName);
	CommandResult cr = shell.executeCommand(String.format("task launch %s", taskName));
	CommandResult idResult = shell.executeCommand("task execution list --name " + taskName);
	Table taskExecutionResult = (Table) idResult.getResult();

	long id = (long) taskExecutionResult.getModel().getValue(1, 1);
	assertTrue(cr.toString().contains("with execution id " + id));
	waitForDBToBePopulated(id);
	return id;
}
 
Example #29
Source File: ITTestSavepointsCommand.java    From hudi with Apache License 2.0
/**
 * Test case of command 'savepoint delete'.
 */
@Test
public void testDeleteSavepoint() throws IOException {
  // generate four commits
  for (int i = 100; i < 104; i++) {
    String instantTime = String.valueOf(i);
    HoodieTestDataGenerator.createCommitFile(tablePath, instantTime, jsc.hadoopConfiguration());
  }

  // generate two savepoints
  String savepoint1 = "100";
  String savepoint2 = "102";
  HoodieTestDataGenerator.createSavepointFile(tablePath, savepoint1, jsc.hadoopConfiguration());
  HoodieTestDataGenerator.createSavepointFile(tablePath, savepoint2, jsc.hadoopConfiguration());

  HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline();
  assertEquals(2, timeline.getSavePointTimeline().countInstants(), "There should 2 instants.");

  CommandResult cr = getShell().executeCommand(
      String.format("savepoint delete --commit %s --sparkMaster %s", savepoint1, "local"));

  assertAll("Command run failed",
      () -> assertTrue(cr.isSuccess()),
      () -> assertEquals(
          String.format("Savepoint \"%s\" deleted.", savepoint1), cr.getResult().toString()));

  // reload timeline
  timeline = timeline.reload();
  assertEquals(1, timeline.getSavePointTimeline().countInstants(), "There should 1 instants.");

  // after the delete, instant 100 should no longer exist.
  assertFalse(timeline.containsInstant(new HoodieInstant(false, HoodieTimeline.SAVEPOINT_ACTION, savepoint1)));
}
 
Example #30
Source File: StreamCommandTemplate.java    From spring-cloud-dataflow with Apache License 2.0
/**
 * Destroy a specific stream
 *
 * @param stream The stream to destroy
 */
public void destroyStream(String stream) {
	CommandResult cr = shell.executeCommand("stream destroy --name " + stream);
	// stateVerifier.waitForDestroy(stream);
	assertTrue("Failure to destroy stream " + stream + ".  CommandResult = " + cr.toString(), cr.isSuccess());
	streams.remove(stream);
}