Java Code Examples for org.springframework.shell.core.annotation.CliOption

The following examples show how to use org.springframework.shell.core.annotation.CliOption. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: hdfs-shell   Source File: ContextCommands.java    License: Apache License 2.0 6 votes vote down vote up
@CliCommand(value = "su", help = "Changes current active user [*experimental*]")
    public synchronized String su(@CliOption(key = {""}, help = "su [<username>]") String newUser) throws IOException {
        // A target user name is mandatory.
        if (StringUtils.isEmpty(newUser)) {
            return "No username is defined! ";
        }
        final FileSystem fs = getFileSystem();
        final Path usersDir = new Path("/user");
        // If /user exists, require the target user to have a home directory there;
        // otherwise (no /user dir at all) accept any name.
        if (fs.exists(usersDir)) {
            final String finalNewUser = newUser; // effectively-final copy for the lambda
            final boolean foundUser = Arrays.stream(fs.listStatus(usersDir)).
                    filter(FileStatus::isDirectory).
                    anyMatch(fileStatus -> fileStatus.getPath().getName().equals(finalNewUser));
            if (!foundUser) {
                return "User " + newUser + " does not exist!";
            }
        }
        // Hadoop reads the impersonated user from this system property.
        System.setProperty("HADOOP_USER_NAME", newUser);
        UserGroupInformation.loginUserFromSubject(null);
        currentDir = null; // reset cached working directory for the new user
        return "";
    }
 
Example 2
Source Project: hudi   Source File: CompactionCommand.java    License: Apache License 2.0 6 votes vote down vote up
@CliCommand(value = "compactions show all", help = "Shows all compactions that are in active timeline")
public String compactionsAll(
    @CliOption(key = {"includeExtraMetadata"}, help = "Include extra metadata",
        unspecifiedDefaultValue = "false") final boolean includeExtraMetadata,
    @CliOption(key = {"limit"}, help = "Limit commits",
        unspecifiedDefaultValue = "-1") final Integer limit,
    @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField,
    @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending,
    @CliOption(key = {"headeronly"}, help = "Print Header Only",
        unspecifiedDefaultValue = "false") final boolean headerOnly)
    throws IOException {
  // Render every compaction found on the table's active timeline.
  final HoodieActiveTimeline timeline = checkAndGetMetaClient().getActiveTimeline();
  return printAllCompactions(
      timeline,
      compactionPlanReader(this::readCompactionPlanForActiveTimeline, timeline),
      includeExtraMetadata, sortByField, descending, limit, headerOnly);
}
 
Example 3
Source Project: hudi   Source File: CommitsCommand.java    License: Apache License 2.0 6 votes vote down vote up
@CliCommand(value = "commits show", help = "Show the commits")
public String showCommits(
    @CliOption(key = {"includeExtraMetadata"}, help = "Include extra metadata",
        unspecifiedDefaultValue = "false") final boolean includeExtraMetadata,
    @CliOption(key = {"createView"}, mandatory = false, help = "view name to store output table",
        unspecifiedDefaultValue = "") final String exportTableName,
    @CliOption(key = {"limit"}, help = "Limit commits",
        unspecifiedDefaultValue = "-1") final Integer limit,
    @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField,
    @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending,
    @CliOption(key = {"headeronly"}, help = "Print Header Only",
        unspecifiedDefaultValue = "false") final boolean headerOnly)
    throws IOException {
  // Choose the renderer based on whether extra commit metadata was requested;
  // both variants share the same paging/sorting arguments.
  HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline();
  return includeExtraMetadata
      ? printCommitsWithMetadata(timeline, limit, sortByField, descending, headerOnly, exportTableName)
      : printCommits(timeline, limit, sortByField, descending, headerOnly, exportTableName);
}
 
Example 4
Source Project: hudi   Source File: TableCommand.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Connects the CLI to a Hoodie table at the given base path and switches the
 * shell into TABLE state. Optionally configures an eventual-consistency guard
 * before the file system is (re-)initialized.
 */
@CliCommand(value = "connect", help = "Connect to a hoodie table")
public String connect(
    @CliOption(key = {"path"}, mandatory = true, help = "Base Path of the table") final String path,
    @CliOption(key = {"layoutVersion"}, help = "Timeline Layout version") Integer layoutVersion,
    @CliOption(key = {"eventuallyConsistent"}, unspecifiedDefaultValue = "false",
        help = "Enable eventual consistency") final boolean eventuallyConsistent,
    @CliOption(key = {"initialCheckIntervalMs"}, unspecifiedDefaultValue = "2000",
        help = "Initial wait time for eventual consistency") final Integer initialConsistencyIntervalMs,
    @CliOption(key = {"maxWaitIntervalMs"}, unspecifiedDefaultValue = "300000",
        help = "Max wait time for eventual consistency") final Integer maxConsistencyIntervalMs,
    // NOTE(review): the key says "IntervalMs", but the help text and the builder
    // call below treat this value as a *count* of checks (default 7), not a
    // duration — confirm the key name against upstream usage.
    @CliOption(key = {"maxCheckIntervalMs"}, unspecifiedDefaultValue = "7",
        help = "Max checks for eventual consistency") final Integer maxConsistencyChecks)
    throws IOException {
  // Install the consistency-guard config first so initFS() below picks it up.
  HoodieCLI
      .setConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder().withConsistencyCheckEnabled(eventuallyConsistent)
          .withInitialConsistencyCheckIntervalMs(initialConsistencyIntervalMs)
          .withMaxConsistencyCheckIntervalMs(maxConsistencyIntervalMs).withMaxConsistencyChecks(maxConsistencyChecks)
          .build());
  HoodieCLI.initConf();
  HoodieCLI.connectTo(path, layoutVersion);
  HoodieCLI.initFS(true); // force re-initialization of the FS handle
  HoodieCLI.state = HoodieCLI.CLIState.TABLE;
  return "Metadata for table " + HoodieCLI.getTableMetaClient().getTableConfig().getTableName() + " loaded";
}
 
Example 5
@CliCommand(value = REGISTER_APPLICATION, help = "Register a new application")
public String register(
		@CliOption(mandatory = true, key = {"", "name"},
				help = "the name for the registered application") String name,
		@CliOption(mandatory = true, key = {"type"},
				help = "the type for the registered application") String type,
		@CliOption(mandatory = true, key = {"uri"},
				help = "URI for the application artifact") String uri,
		@CliOption(key = "force",
				help = "force update if application is already registered (only if not in use)",
				specifiedDefaultValue = "true",
				unspecifiedDefaultValue = "false") boolean force) {
	// Delegate the actual registration to the app-registry operations client,
	// then echo a confirmation back to the shell.
	appRegistryOperations().register(name, type, uri, force);
	return String.format("Successfully registered application '%s:%s'", type, name);
}
 
Example 6
Source Project: gemfirexd-oss   Source File: IndexCommands.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Lists indexes (via {@code getIndexListing()}) as a tabular result, optionally
 * including statistics when --stats is specified.
 */
@CliCommand(value = CliStrings.LIST_INDEX, help = CliStrings.LIST_INDEX__HELP)
@CliMetaData(shellOnly = false, relatedTopic={CliStrings.TOPIC_GEMFIRE_REGION, CliStrings.TOPIC_GEMFIRE_DATA})
public Result listIndex(@CliOption(key = CliStrings.LIST_INDEX__STATS,
                                   mandatory = false,
                                   specifiedDefaultValue = "true",
                                   unspecifiedDefaultValue = "false",
                                   help = CliStrings.LIST_INDEX__STATS__HELP)
                          final boolean showStats) {
  try {
    return toTabularResult(getIndexListing(), showStats);
  }
  // Function target became invalid mid-execution (presumably a member left);
  // ask the user to retry instead of surfacing a partial result.
  catch (FunctionInvocationTargetException ignore) {
    return ResultBuilder.createGemFireErrorResult(CliStrings.format(CliStrings.COULD_NOT_EXECUTE_COMMAND_TRY_AGAIN,
      CliStrings.LIST_INDEX));
  }
  // VM-level errors are never swallowed: trigger system-failure handling and rethrow.
  catch (VirtualMachineError e) {
    SystemFailure.initiateFailure(e);
    throw e;
  }
  catch (Throwable t) {
    // Mandatory check before handling any Throwable in this codebase.
    SystemFailure.checkFailure();
    getCache().getLogger().error(t);
    return ResultBuilder.createGemFireErrorResult(String.format(CliStrings.LIST_INDEX__ERROR_MESSAGE,
      toString(t, isDebugging())));
  }
}
 
Example 7
Source Project: gemfirexd-oss   Source File: ShellCommands.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Echoes the given string back to the shell. The special value "$*" dumps the
 * entire Gfsh environment as a table instead.
 */
@CliCommand(value = { CliStrings.ECHO }, help = CliStrings.ECHO__HELP)
@CliMetaData(shellOnly = true, relatedTopic = {CliStrings.TOPIC_GFSH})
public Result echo(
    @CliOption(key = {CliStrings.ECHO__STR, ""},
               unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE,
               specifiedDefaultValue = "",
               mandatory = true,
               help = CliStrings.ECHO__STR__HELP) String stringToEcho) {
  // Constant-first equals: the unspecified default is the annotation null
  // marker, so guard against a null argument instead of risking an NPE.
  if ("$*".equals(stringToEcho)) {
    Gfsh gfshInstance = getGfsh();
    Map<String, String> envMap = gfshInstance.getEnv();
    Set<Entry<String, String>> setEnvMap = envMap.entrySet();
    TabularResultData resultData = buildResultForEcho(setEnvMap);
    return ResultBuilder.buildResult(resultData);
  }
  return ResultBuilder.createInfoResult(stringToEcho);
}
 
Example 8
Source Project: hudi   Source File: TableCommand.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Create a Hoodie Table if it does not exist.
 *
 * @param path Base Path
 * @param name Hoodie Table Name
 * @param tableTypeStr Hoodie Table Type (COPY_ON_WRITE or MERGE_ON_READ)
 * @param archiveFolder folder name for the archived timeline (may be null)
 * @param layoutVersion specific timeline layout version to use (may be null)
 * @param payloadClass Payload Class
 * @throws IllegalStateException if a table already exists at {@code path}
 */
@CliCommand(value = "create", help = "Create a hoodie table if not present")
public String createTable(
    @CliOption(key = {"path"}, mandatory = true, help = "Base Path of the table") final String path,
    @CliOption(key = {"tableName"}, mandatory = true, help = "Hoodie Table Name") final String name,
    @CliOption(key = {"tableType"}, unspecifiedDefaultValue = "COPY_ON_WRITE",
        help = "Hoodie Table Type. Must be one of : COPY_ON_WRITE or MERGE_ON_READ") final String tableTypeStr,
    @CliOption(key = {"archiveLogFolder"}, help = "Folder Name for storing archived timeline") String archiveFolder,
    @CliOption(key = {"layoutVersion"}, help = "Specific Layout Version to use") Integer layoutVersion,
    @CliOption(key = {"payloadClass"}, unspecifiedDefaultValue = "org.apache.hudi.common.model.HoodieAvroPayload",
        help = "Payload Class") final String payloadClass)
    throws IOException {

  boolean initialized = HoodieCLI.initConf();
  HoodieCLI.initFS(initialized);

  // Existence probe by construction: the meta client throws
  // TableNotFoundException when no table lives at the path.
  boolean existing = false;
  try {
    new HoodieTableMetaClient(HoodieCLI.conf, path);
    existing = true;
  } catch (TableNotFoundException dfe) {
    // expected
  }

  // Do not touch table that already exist
  if (existing) {
    throw new IllegalStateException("Table already existing in path : " + path);
  }

  // Throws IllegalArgumentException if tableTypeStr is not a valid enum name.
  final HoodieTableType tableType = HoodieTableType.valueOf(tableTypeStr);
  HoodieTableMetaClient.initTableType(HoodieCLI.conf, path, tableType, name, archiveFolder,
      payloadClass, layoutVersion);

  // Now connect to ensure loading works
  return connect(path, layoutVersion, false, 0, 0, 0);
}
 
Example 9
Source Project: hdfs-shell   Source File: ContextCommands.java    License: Apache License 2.0 5 votes vote down vote up
@CliCommand(value = "set", help = "Set switch value")
public String set(@CliOption(key = {""}, help = "showResultCodeON/showResultCodeOFF") String commandSwitch) {
    // No argument given: remind the user which switches exist.
    if (commandSwitch == null) {
        return "possible parameters .... showResultCodeON/showResultCodeOFF";
    }
    // Anything other than a showResultCode* switch is rejected.
    if (!commandSwitch.startsWith("showResultCode")) {
        return "Unknown switch " + commandSwitch;
    }
    showResultCode = "showResultCodeON".equalsIgnoreCase(commandSwitch);
    return commandSwitch + " has been set";
}
 
Example 10
Source Project: hdfs-shell   Source File: EditCommands.java    License: Apache License 2.0 5 votes vote down vote up
@SuppressWarnings("ResultOfMethodCallIgnored")
@CliCommand(value = "edit", help = "Get file to local file system, edit and put it back to HDFS")
public String set(@CliOption(key = {""}, help = "File to edit") String path) throws IOException {
    // A target path is mandatory.
    if (StringUtils.isEmpty(path)) {
        return "You have to define path param";
    }
    Path p = getFilePathForEdit(path);

    if (!contextCommands.getFileSystem().exists(p)) {
        return "Path " + p + " does not exist. Invalid file?";
    }

    // Copy the HDFS file to a local temp file, open the editor, and on a
    // successful edit push the result back with "put -f".
    final File localTempFile = getLocalTempFile(p.getName());
    try {
        // runCommand returns an empty string on success.
        final String getCommandResult = hadoopDfsCommands.runCommand("get", new String[]{p.toString(), localTempFile.getAbsolutePath()});
        if (StringUtils.isEmpty(getCommandResult)) {
            if (editFile(localTempFile)) {
                final String putCommandResult = hadoopDfsCommands.runCommand("put", new String[]{"-f", localTempFile.getAbsolutePath(), p.toString()});
                if (StringUtils.isEmpty(putCommandResult)) {
                    logger.info("File {} was updated successfully", p.getName());
                    return "File " + p.getName() + " was updated successfully.";
                }
            } else {
                return "File " + p.getName() + " was NOT updated.";
            }
        }

    } catch (Exception e) {
        return "Failed to edit file: " + e.getMessage();
    } finally {
        // Always remove the local copy, success or failure.
        localTempFile.delete();
    }

    return "";
}
 
Example 11
Source Project: hudi   Source File: CleansCommand.java    License: Apache License 2.0 5 votes vote down vote up
@CliCommand(value = "clean showpartitions", help = "Show partition level details of a clean")
public String showCleanPartitions(@CliOption(key = {"clean"}, help = "clean to show") final String instantTime,
    @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit,
    @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField,
    @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending,
    @CliOption(key = {"headeronly"}, help = "Print Header Only",
        unspecifiedDefaultValue = "false") final boolean headerOnly)
    throws Exception {

  // Only completed clean instants are eligible for inspection.
  HoodieTimeline cleanTimeline =
      HoodieCLI.getTableMetaClient().getActiveTimeline().getCleanerTimeline().filterCompletedInstants();
  HoodieInstant requested = new HoodieInstant(false, HoodieTimeline.CLEAN_ACTION, instantTime);

  if (!cleanTimeline.containsInstant(requested)) {
    return "Clean " + instantTime + " not found in metadata " + cleanTimeline;
  }

  HoodieCleanMetadata cleanMetadata =
      TimelineMetadataUtils.deserializeHoodieCleanMetadata(cleanTimeline.getInstantDetails(requested).get());

  // One row per partition: path, policy, #successfully deleted, #failed.
  List<Comparable[]> rows = new ArrayList<>();
  for (Map.Entry<String, HoodieCleanPartitionMetadata> entry : cleanMetadata.getPartitionMetadata().entrySet()) {
    HoodieCleanPartitionMetadata partitionStats = entry.getValue();
    rows.add(new Comparable[] {entry.getKey(), partitionStats.getPolicy(),
        partitionStats.getSuccessDeleteFiles().size(), partitionStats.getFailedDeleteFiles().size()});
  }

  TableHeader header = new TableHeader()
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION_PATH)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_CLEANING_POLICY)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_FILES_SUCCESSFULLY_DELETED)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_FAILED_DELETIONS);
  return HoodiePrintHelper.print(header, new HashMap<>(), sortByField, descending, limit, headerOnly, rows);
}
 
Example 12
Source Project: gemfirexd-oss   Source File: CommandManagerJUnitTest.java    License: Apache License 2.0 5 votes vote down vote up
@CliCommand(value = { COMMAND1_NAME, COMMAND1_NAME_ALIAS }, help = COMMAND1_HELP)
@CliMetaData(shellOnly = true, relatedTopic = { "relatedTopicOfCommand1" })
public static String command1(
    @CliArgument(name = ARGUMENT1_NAME, argumentContext = ARGUMENT1_CONTEXT, help = ARGUMENT1_HELP,
        mandatory = true) String argument1,
    @CliArgument(name = ARGUEMNT2_NAME, argumentContext = ARGUMENT2_CONTEXT, help = ARGUMENT2_HELP,
        mandatory = false, unspecifiedDefaultValue = ARGUMENT2_UNSPECIFIED_DEFAULT_VALUE,
        systemProvided = false) String argument2,
    @CliOption(key = { OPTION1_NAME, OPTION1_SYNONYM }, help = OPTION1_HELP, mandatory = true,
        optionContext = OPTION1_CONTEXT, specifiedDefaultValue = OPTION1_SPECIFIED_DEFAULT_VALUE) String option1,
    @CliOption(key = { OPTION2_NAME }, help = OPTION2_HELP, mandatory = false,
        optionContext = OPTION2_CONTEXT, specifiedDefaultValue = OPTION2_SPECIFIED_DEFAULT_VALUE) String option2,
    @CliOption(key = { OPTION3_NAME, OPTION3_SYNONYM }, help = OPTION3_HELP, mandatory = false,
        optionContext = OPTION3_CONTEXT, unspecifiedDefaultValue = OPTION3_UNSPECIFIED_DEFAULT_VALUE,
        specifiedDefaultValue = OPTION3_SPECIFIED_DEFAULT_VALUE) String option3) {
  // Test fixture: only the annotation metadata matters, the body is unused.
  return null;
}
 
Example 13
Source Project: hudi   Source File: SavepointsCommand.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Deletes a completed savepoint by launching a Spark job and refreshing the
 * CLI's table metadata afterwards.
 */
@CliCommand(value = "savepoint delete", help = "Delete the savepoint")
public String deleteSavepoint(@CliOption(key = {"commit"}, help = "Delete a savepoint") final String instantTime,
    @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path") final String sparkPropertiesPath,
    @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master,
    @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G",
        help = "Spark executor memory") final String sparkMemory)
    throws Exception {
  HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient();
  // Only completed savepoints can be deleted.
  HoodieTimeline completedInstants = metaClient.getActiveTimeline().getSavePointTimeline().filterCompletedInstants();
  if (completedInstants.empty()) {
    throw new HoodieException("There are no completed savepoint to run delete");
  }
  HoodieInstant savePoint = new HoodieInstant(false, HoodieTimeline.SAVEPOINT_ACTION, instantTime);

  if (!completedInstants.containsInstant(savePoint)) {
    return "Commit " + instantTime + " not found in Commits " + completedInstants;
  }

  // Run the deletion out-of-process via spark-submit and block for the result.
  SparkLauncher sparkLauncher = SparkUtil.initLauncher(sparkPropertiesPath);
  sparkLauncher.addAppArgs(SparkMain.SparkCommand.DELETE_SAVEPOINT.toString(), master, sparkMemory, instantTime,
      metaClient.getBasePath());
  Process process = sparkLauncher.launch();
  InputStreamConsumer.captureOutput(process);
  int exitCode = process.waitFor();
  // Refresh the current
  HoodieCLI.refreshTableMetadata();
  if (exitCode != 0) {
    return String.format("Failed: Could not delete savepoint \"%s\".", instantTime);
  }
  return String.format("Savepoint \"%s\" deleted.", instantTime);
}
 
Example 14
Source Project: gemfirexd-oss   Source File: CommandManager.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Translates a {@link CliOption} annotation into a runtime {@link Option}.
 *
 * @param cliOption the annotation read from a command-method parameter
 * @param parameterType the declared Java type of that parameter
 * @param parameterNo the position of the parameter in the method
 * @return the populated Option
 */
public Option createOption(CliOption cliOption, Class<?> parameterType,
    int parameterNo) {
  Option option = new Option();

  // The first key accepted as the long option becomes the primary name;
  // every remaining key is recorded as a synonym.
  List<String> synonyms = new ArrayList<String>();
  for (String key : cliOption.key()) {
    if (!option.setLongOption(key)) {
      synonyms.add(key);
    }
  }
  option.setSynonyms(synonyms);
  if (option.getAggregate().size() == 0) {
    logWrapper.warning("Option should have a name");
  }

  option.setHelp(cliOption.help());
  option.setRequired(cliOption.mandatory());

  // Default-value and system-provided handling.
  option.setSystemProvided(cliOption.systemProvided());
  option.setSpecifiedDefaultValue(cliOption.specifiedDefaultValue());
  option.setUnspecifiedDefaultValue(cliOption.unspecifiedDefaultValue());

  // The option context drives both value conversion and auto-completion;
  // resolve the matching Converter<?> for the parameter type.
  option.setContext(cliOption.optionContext());
  option.setConverter(getConverter(parameterType, option.getContext()));

  option.setDataType(parameterType);
  option.setParameterNo(parameterNo);
  return option;
}
 
Example 15
Source Project: gemfirexd-oss   Source File: CommandManagerJUnitTest.java    License: Apache License 2.0 5 votes vote down vote up
@CliCommand(value = { "testParamConcat" })
public static Result testParamConcat(
    @CliOption(key = { "string" }) String string,
    @CliOption(key = { "stringArray" }) @CliMetaData(valueSeparator = ",") String[] stringArray,
    @CliOption(key = { "stringList" }, optionContext = ConverterHint.STRING_LIST)
        @CliMetaData(valueSeparator = ",") List<String> stringList,
    @CliOption(key = { "integer" }) Integer integer,
    @CliOption(key = { "colonArray" }) @CliMetaData(valueSeparator = ":") String[] colonArray) {
  // Test fixture: exercises parameter concatenation parsing; body is unused.
  return null;
}
 
Example 16
Source Project: gemfirexd-oss   Source File: ShellCommands.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Encrypts the supplied string with {@code PasswordUtil.encrypt} and returns
 * the ciphertext as an info result (echo is disabled).
 */
@CliCommand(value = CliStrings.ENCRYPT, help = CliStrings.ENCRYPT__HELP)
@CliMetaData(shellOnly = true, relatedTopic = {CliStrings.TOPIC_GEMFIRE_DEBUG_UTIL})
public Result encryptPassword(
    @CliOption(key = CliStrings.ENCRYPT_STRING,
               help = CliStrings.ENCRYPT_STRING__HELP,
               mandatory = true)
               String stringToEncrypt) {
  return ResultBuilder.createInfoResult(PasswordUtil.encrypt(stringToEncrypt, false/*echo*/));
}
 
Example 17
Source Project: hudi   Source File: RollbacksCommand.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Shows per-file details of one completed rollback instant: for every
 * partition, each deleted file is emitted as a row together with whether the
 * delete succeeded.
 */
@CliCommand(value = "show rollback", help = "Show details of a rollback instant")
public String showRollback(
    @CliOption(key = {"instant"}, help = "Rollback instant", mandatory = true) String rollbackInstant,
    @CliOption(key = {"limit"}, help = "Limit  #rows to be displayed", unspecifiedDefaultValue = "10") Integer limit,
    @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField,
    @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending,
    @CliOption(key = {"headeronly"}, help = "Print Header Only",
        unspecifiedDefaultValue = "false") final boolean headerOnly)
    throws IOException {
  HoodieActiveTimeline activeTimeline = new RollbackTimeline(HoodieCLI.getTableMetaClient());
  final List<Comparable[]> rows = new ArrayList<>();
  // Deserializing a non-existent instant will throw from getInstantDetails/get.
  HoodieRollbackMetadata metadata = TimelineMetadataUtils.deserializeAvroMetadata(
      activeTimeline.getInstantDetails(new HoodieInstant(State.COMPLETED, ROLLBACK_ACTION, rollbackInstant)).get(),
      HoodieRollbackMetadata.class);
  // Merge success and failure lists per partition, tagging each file with its
  // delete status, then flatten into table rows.
  metadata.getPartitionMetadata().forEach((key, value) -> Stream
          .concat(value.getSuccessDeleteFiles().stream().map(f -> Pair.of(f, true)),
                  value.getFailedDeleteFiles().stream().map(f -> Pair.of(f, false)))
          .forEach(fileWithDeleteStatus -> {
            Comparable[] row = new Comparable[5];
            row[0] = metadata.getStartRollbackTime();
            row[1] = metadata.getCommitsRollback().toString();
            row[2] = key;
            row[3] = fileWithDeleteStatus.getLeft();
            row[4] = fileWithDeleteStatus.getRight();
            rows.add(row);
          }));

  TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_INSTANT)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_ROLLBACK_INSTANT)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_DELETED_FILE)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_SUCCEEDED);
  return HoodiePrintHelper.print(header, new HashMap<>(), sortByField, descending, limit, headerOnly, rows);
}
 
Example 18
@CliCommand(value = { "testParamConcat" })
public static Result testParamConcat(
    @CliOption(key = { "string" }) String string,
    @CliOption(key = { "stringArray" }) @CliMetaData(valueSeparator = ",") String[] stringArray,
    @CliOption(key = { "stringList" }, optionContext = ConverterHint.STRING_LIST)
        @CliMetaData(valueSeparator = ",") List<String> stringList,
    @CliOption(key = { "integer" }) Integer integer,
    @CliOption(key = { "colonArray" }) @CliMetaData(valueSeparator = ":") String[] colonArray) {
  // Parser test fixture; the return value is never inspected.
  return null;
}
 
Example 19
Source Project: hdfs-shell   Source File: HadoopDfsCommands.java    License: Apache License 2.0 5 votes vote down vote up
@CliCommand(value = {"ls", "hdfs dfs -ls"}, help = "List the contents that match the specified file pattern.")
public String ls(
        @CliOption(key = {""}, help = "List the contents that match the specified file pattern.", specifiedDefaultValue = "", unspecifiedDefaultValue = "") String path
) {
    // An empty pattern means "list the default location": forward null instead.
    return runCommand("ls", StringUtils.isEmpty(path) ? null : path);
}
 
Example 20
Source Project: hdfs-shell   Source File: HadoopDfsCommands.java    License: Apache License 2.0 5 votes vote down vote up
@CliCommand(value = {"lsr", "hdfs dfs -lsr"}, help = "(DEPRECATED) Same as 'ls -R'.")
public String lsr(
        @CliOption(key = {""}, help = "(DEPRECATED) Same as 'ls -R'.") String path
) {
    // An empty pattern means "list the default location": forward null instead.
    return runCommand("lsr", StringUtils.isEmpty(path) ? null : path);
}
 
Example 21
Source Project: hudi   Source File: CommitsCommand.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Rolls back one completed commit by launching a Spark job and refreshing the
 * CLI's table metadata afterwards.
 */
@CliCommand(value = "commit rollback", help = "Rollback a commit")
public String rollbackCommit(@CliOption(key = {"commit"}, help = "Commit to rollback") final String instantTime,
    @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path") final String sparkPropertiesPath,
    @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master,
    @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G",
       help = "Spark executor memory") final String sparkMemory)
    throws Exception {
  HoodieActiveTimeline activeTimeline = HoodieCLI.getTableMetaClient().getActiveTimeline();
  // The commit must exist among the completed commits to be rolled back.
  HoodieTimeline completedTimeline = activeTimeline.getCommitsTimeline().filterCompletedInstants();
  HoodieTimeline filteredTimeline = completedTimeline.filter(instant -> instant.getTimestamp().equals(instantTime));
  if (filteredTimeline.empty()) {
    return "Commit " + instantTime + " not found in Commits " + completedTimeline;
  }

  // Run the rollback out-of-process via spark-submit and block for the result.
  SparkLauncher sparkLauncher = SparkUtil.initLauncher(sparkPropertiesPath);
  sparkLauncher.addAppArgs(SparkMain.SparkCommand.ROLLBACK.toString(), master, sparkMemory, instantTime,
      HoodieCLI.getTableMetaClient().getBasePath());
  Process process = sparkLauncher.launch();
  InputStreamConsumer.captureOutput(process);
  int exitCode = process.waitFor();
  // Refresh the current
  HoodieCLI.refreshTableMetadata();
  if (exitCode != 0) {
    return "Commit " + instantTime + " failed to roll back";
  }
  return "Commit " + instantTime + " rolled back";
}
 
Example 22
Source Project: hudi   Source File: SavepointsCommand.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Rolls the table back to a previously created savepoint by launching a Spark
 * job, then refreshes the CLI's table metadata.
 */
// Help text fixed: it previously read "Savepoint a commit", which describes
// savepoint *creation*, not rollback.
@CliCommand(value = "savepoint rollback", help = "Rollback to a savepoint")
public String rollbackToSavepoint(
    @CliOption(key = {"savepoint"}, help = "Savepoint to rollback") final String instantTime,
    @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path") final String sparkPropertiesPath,
    @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master,
    @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G",
        help = "Spark executor memory") final String sparkMemory)
    throws Exception {
  HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient();
  if (metaClient.getActiveTimeline().getSavePointTimeline().filterCompletedInstants().empty()) {
    throw new HoodieException("There are no completed instants to run rollback");
  }
  // The savepointed commit itself must exist among the completed commits.
  HoodieActiveTimeline activeTimeline = metaClient.getActiveTimeline();
  HoodieTimeline timeline = activeTimeline.getCommitTimeline().filterCompletedInstants();
  HoodieInstant commitInstant = new HoodieInstant(false, HoodieTimeline.COMMIT_ACTION, instantTime);

  if (!timeline.containsInstant(commitInstant)) {
    return "Commit " + instantTime + " not found in Commits " + timeline;
  }

  // Run the rollback out-of-process via spark-submit and block for the result.
  SparkLauncher sparkLauncher = SparkUtil.initLauncher(sparkPropertiesPath);
  sparkLauncher.addAppArgs(SparkMain.SparkCommand.ROLLBACK_TO_SAVEPOINT.toString(), master, sparkMemory,
      instantTime, metaClient.getBasePath());
  Process process = sparkLauncher.launch();
  InputStreamConsumer.captureOutput(process);
  int exitCode = process.waitFor();
  // Refresh the current
  HoodieCLI.refreshTableMetadata();
  if (exitCode != 0) {
    return String.format("Savepoint \"%s\" failed to roll back", instantTime);
  }
  return String.format("Savepoint \"%s\" rolled back", instantTime);
}
 
Example 23
Source Project: hudi   Source File: CleansCommand.java    License: Apache License 2.0 5 votes vote down vote up
@CliCommand(value = "cleans show", help = "Show the cleans")
public String showCleans(
    @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit,
    @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField,
    @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending,
    @CliOption(key = {"headeronly"}, help = "Print Header Only",
        unspecifiedDefaultValue = "false") final boolean headerOnly)
    throws IOException {

  // Completed clean instants, newest first.
  HoodieTimeline cleanerTimeline =
      HoodieCLI.getTableMetaClient().getActiveTimeline().getCleanerTimeline().filterCompletedInstants();
  List<HoodieInstant> cleanInstants = cleanerTimeline.getReverseOrderedInstants().collect(Collectors.toList());

  // One row per clean: timestamp, earliest retained commit, files deleted, duration.
  List<Comparable[]> rows = new ArrayList<>();
  for (HoodieInstant cleanInstant : cleanInstants) {
    HoodieCleanMetadata metadata =
            TimelineMetadataUtils.deserializeHoodieCleanMetadata(cleanerTimeline.getInstantDetails(cleanInstant).get());
    rows.add(new Comparable[]{cleanInstant.getTimestamp(), metadata.getEarliestCommitToRetain(),
            metadata.getTotalFilesDeleted(), metadata.getTimeTakenInMillis()});
  }

  TableHeader header = new TableHeader()
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_CLEAN_TIME)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_EARLIEST_COMMAND_RETAINED)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_FILES_DELETED)
      .addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_TIME_TAKEN);
  return HoodiePrintHelper.print(header, new HashMap<>(), sortByField, descending, limit, headerOnly, rows);
}
 
Example 24
Source Project: hdfs-shell   Source File: HadoopDfsCommands.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sets Access Control Lists (ACLs) on files and directories by delegating the
 * raw argument string to the dedicated setfacl command runner.
 */
@CliCommand(value = {"setfacl", "hdfs dfs -setfacl"}, help = "Sets Access Control Lists (ACLs) of files and directories.")
public String setfacl(
        @CliOption(key = {""}, help = "Sets Access Control Lists (ACLs) of files and directories.") String path
) {

    return runSetFaclCommand("setfacl", path);
}
 
Example 25
Source Project: gemfirexd-oss   Source File: GfshParserTest.java    License: Apache License 2.0 5 votes vote down vote up
// Test-fixture command (presumably exercising the Gfsh parser's option concatenation
// and value-separator handling — from GfshParserTest); it performs no work and
// always returns null, since only the parsed argument binding is under test.
@CliCommand(value = { "testParamConcat" })
public static Result testParamConcat(
    // Plain single-valued string option.
    @CliOption(key = { "string" }) String string,
    // Comma-separated values bound to an array.
    @CliOption(key = { "stringArray" }) @CliMetaData(valueSeparator = ",") String[] stringArray,
    // Comma-separated values bound to a List via the STRING_LIST converter hint.
    @CliOption(key = { "stringList" }, optionContext = ConverterHint.STRING_LIST) @CliMetaData(valueSeparator = ",") List<String> stringList,
    // Single integer option.
    @CliOption(key = { "integer" }) Integer integer,
    // Colon-separated values bound to an array (non-default separator).
    @CliOption(key = { "colonArray" }) @CliMetaData(valueSeparator = ":") String[] colonArray) {
  return null;
}
 
Example 26
Source Project: hudi   Source File: CommitsCommand.java    License: Apache License 2.0
@CliCommand(value = "commits showarchived", help = "Show the archived commits")
public String showArchivedCommits(
        @CliOption(key = {"includeExtraMetadata"}, help = "Include extra metadata",
                unspecifiedDefaultValue = "false") final boolean includeExtraMetadata,
        @CliOption(key = {"createView"}, mandatory = false, help = "view name to store output table",
                unspecifiedDefaultValue = "") final String exportTableName,
        @CliOption(key = {"startTs"},  mandatory = false, help = "start time for commits, default: now - 10 days")
        String startTs,
        @CliOption(key = {"endTs"},  mandatory = false, help = "end time for commits, default: now - 1 day")
        String endTs,
        @CliOption(key = {"limit"}, mandatory = false, help = "Limit commits", unspecifiedDefaultValue = "-1")
        final Integer limit,
        @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "")
        final String sortByField,
        @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false")
        final boolean descending,
        @CliOption(key = {"headeronly"}, help = "Print Header Only", unspecifiedDefaultValue = "false")
        final boolean headerOnly)
        throws IOException {
  // Default window: commits from 10 days ago up to 1 day ago when bounds are omitted.
  final String start = StringUtils.isNullOrEmpty(startTs) ? CommitUtil.getTimeDaysAgo(10) : startTs;
  final String end = StringUtils.isNullOrEmpty(endTs) ? CommitUtil.getTimeDaysAgo(1) : endTs;

  final HoodieArchivedTimeline archived = HoodieCLI.getTableMetaClient().getArchivedTimeline();
  try {
    archived.loadInstantDetailsInMemory(start, end);
    final HoodieDefaultTimeline range = archived.findInstantsInRange(start, end);
    // Render either the extended-metadata view or the plain commit table.
    return includeExtraMetadata
        ? printCommitsWithMetadata(range, limit, sortByField, descending, headerOnly, exportTableName)
        : printCommits(range, limit, sortByField, descending, headerOnly, exportTableName);
  } finally {
    // Release the cached instant details after printing to reduce memory usage.
    archived.clearInstantDetailsFromMemory(start, end);
  }
}
 
Example 27
@CliCommand(value = UNREGISTER_APPLICATION, help = "Unregister an application")
public String unregister(
		@CliOption(mandatory = true,
				key = {"", "name"},
				help = "name of the application to unregister")
				String name,
		@CliOption(mandatory = true,
				key = {"type"},
				help = "type of the application to unregister")
				String type) {
	// Remove the registration first, then report what was removed.
	appRegistryOperations().unregister(name, type);
	final String message =
			String.format("Successfully unregistered application '%s' with type %s", name, type);
	return message;
}
 
Example 28
Source Project: hudi   Source File: UtilsCommand.java    License: Apache License 2.0
/**
 * Loads the named class with {@link Class#forName(String)} and reports the location
 * of the code source (typically the jar) it was loaded from.
 *
 * Returns an error string rather than throwing when the argument is empty, the class
 * cannot be found, or the class has no code source.
 */
@CliCommand(value = "utils loadClass", help = "Load a class")
public String loadClass(@CliOption(key = {"class"}, help = "Check mode") final String clazz) {
  if (StringUtils.isNullOrEmpty(clazz)) {
    return "Class to be loaded can not be null!";
  }
  try {
    // Class<?> instead of raw Class avoids an unchecked raw-type usage.
    final Class<?> klass = Class.forName(clazz);
    // getCodeSource() returns null for classes loaded by the bootstrap classloader
    // (e.g. java.lang.String); the previous unguarded chain would NPE on those.
    final java.security.CodeSource codeSource = klass.getProtectionDomain().getCodeSource();
    if (codeSource == null || codeSource.getLocation() == null) {
      return String.format("Class %s was loaded by the bootstrap classloader (no code source available).", clazz);
    }
    return codeSource.getLocation().toExternalForm();
  } catch (ClassNotFoundException e) {
    return String.format("Class %s not found!", clazz);
  }
}
 
Example 29
Source Project: hudi   Source File: RepairsCommand.java    License: Apache License 2.0
@CliCommand(value = "repair deduplicate",
    help = "De-duplicate a partition path contains duplicates & produce repaired files to replace with")
public String deduplicate(
    @CliOption(key = {"duplicatedPartitionPath"}, help = "Partition Path containing the duplicates",
        mandatory = true) final String duplicatedPartitionPath,
    @CliOption(key = {"repairedOutputPath"}, help = "Location to place the repaired files",
        mandatory = true) final String repairedOutputPath,
    @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path",
        unspecifiedDefaultValue = "") String sparkPropertiesPath,
    @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master,
    @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G",
        help = "Spark executor memory") final String sparkMemory,
    @CliOption(key = {"dryrun"},
        help = "Should we actually remove duplicates or just run and store result to repairedOutputPath",
        unspecifiedDefaultValue = "true") final boolean dryRun)
    throws Exception {
  // Fall back to the default Spark properties file derived from the environment.
  String propsFile = sparkPropertiesPath;
  if (StringUtils.isNullOrEmpty(propsFile)) {
    propsFile = Utils.getDefaultPropertiesFile(JavaConverters.mapAsScalaMapConverter(System.getenv()).asScala());
  }

  // Run deduplication as a separate Spark application and wait for it to finish.
  final SparkLauncher launcher = SparkUtil.initLauncher(propsFile);
  launcher.addAppArgs(SparkMain.SparkCommand.DEDUPLICATE.toString(), master, sparkMemory,
      duplicatedPartitionPath, repairedOutputPath, HoodieCLI.getTableMetaClient().getBasePath(),
      String.valueOf(dryRun));
  final Process job = launcher.launch();
  InputStreamConsumer.captureOutput(job);
  final int status = job.waitFor();

  if (status != 0) {
    return "Deduplication failed!";
  }
  // Dry run leaves repaired files in the output path; a real run repairs the partition itself.
  return DEDUPLICATE_RETURN_PREFIX + (dryRun ? repairedOutputPath : duplicatedPartitionPath);
}
 
Example 30
/**
 * Un-deploys every deployed application, prompting for confirmation unless
 * {@code --force} was supplied; only an answer of "y" proceeds.
 *
 * Returns a summary message on success, or an empty string when the user declines.
 */
@CliCommand(value = UNDEPLOY_APPLICATION_ALL, help = "Un-deploy all previously deployed applications")
public String undeployAllApplications(
		@CliOption(key = "force", help = "bypass confirmation prompt", unspecifiedDefaultValue = "false", specifiedDefaultValue = "true") boolean force
		) {
	if (force || "y".equalsIgnoreCase(userInput.promptWithOptions("Really undeploy all applications?", "n", "y", "n"))) {
		applicationOperations().undeployAll();
		// No arguments to interpolate, so a plain literal replaces the redundant String.format call.
		return "Un-deployed all the applications";
	}
	return "";
}