Java Code Examples for org.elasticsearch.rest.RestRequest#paramAsBoolean()

The following examples show how to use org.elasticsearch.rest.RestRequest#paramAsBoolean(). Each example is taken from an open-source project; the source file, originating project, and license are noted above the code.
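Before the project examples, here is a minimal sketch of the usual pattern: paramAsBoolean(name, defaultValue) reads an optional boolean query-string parameter inside a rest handler's prepareRequest method and falls back to the default when the parameter is absent. The handler class, route, and "verbose" parameter below are illustrative assumptions written against a 7.x-style BaseRestHandler, not code from any of the projects listed.

import java.io.IOException;
import java.util.Collections;
import java.util.List;

import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestStatus;

import static org.elasticsearch.rest.RestRequest.Method.GET;

// Hypothetical handler; the route and the "verbose" parameter are examples only.
public class ExampleVerboseAction extends BaseRestHandler {

    @Override
    public String getName() {
        return "example_verbose_action";
    }

    @Override
    public List<Route> routes() {
        return Collections.singletonList(new Route(GET, "/_example/info"));
    }

    @Override
    protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
        // Parses "?verbose=true|false" if present; falls back to false otherwise.
        // In recent versions, malformed values (e.g. "?verbose=maybe") are rejected with an error.
        boolean verbose = request.paramAsBoolean("verbose", false);

        return channel -> {
            XContentBuilder content = XContentFactory.jsonBuilder().startObject();
            content.field("verbose", verbose);
            if (verbose) {
                content.field("detail", "extra information requested via ?verbose=true");
            }
            content.endObject();
            channel.sendResponse(new BytesRestResponse(RestStatus.OK, content));
        };
    }
}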
Example 1
Source File: RestGetAnomalyDetectorAction.java    From anomaly-detection with Apache License 2.0
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
    if (!EnabledSetting.isADPluginEnabled()) {
        throw new IllegalStateException(CommonErrorMessages.DISABLED_ERR_MSG);
    }
    String detectorId = request.param(DETECTOR_ID);
    String typesStr = request.param(TYPE);
    String rawPath = request.rawPath();
    if (!Strings.isEmpty(typesStr) || rawPath.endsWith(PROFILE) || rawPath.endsWith(PROFILE + "/")) {
        boolean all = request.paramAsBoolean("_all", false);
        return channel -> profileRunner
            .profile(detectorId, getProfileActionListener(channel, detectorId), getProfilesToCollect(typesStr, all));
    } else {
        boolean returnJob = request.paramAsBoolean("job", false);
        MultiGetRequest.Item adItem = new MultiGetRequest.Item(ANOMALY_DETECTORS_INDEX, detectorId)
            .version(RestActions.parseVersion(request));
        MultiGetRequest multiGetRequest = new MultiGetRequest().add(adItem);
        if (returnJob) {
            MultiGetRequest.Item adJobItem = new MultiGetRequest.Item(ANOMALY_DETECTOR_JOB_INDEX, detectorId)
                .version(RestActions.parseVersion(request));
            multiGetRequest.add(adJobItem);
        }

        return channel -> client.multiGet(multiGetRequest, onMultiGetResponse(channel, returnJob, detectorId));
    }
}
 
Example 2
Source File: HomeAction.java    From zentity with Apache License 2.0
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) {

    Properties props = ZentityPlugin.properties();
    Boolean pretty = restRequest.paramAsBoolean("pretty", false);

    return channel -> {
        XContentBuilder content = XContentFactory.jsonBuilder();
        if (pretty)
            content.prettyPrint();
        content.startObject();
        content.field("name", props.getProperty("name"));
        content.field("description", props.getProperty("description"));
        content.field("website", props.getProperty("zentity.website"));
        content.startObject("version");
        content.field("zentity", props.getProperty("zentity.version"));
        content.field("elasticsearch", props.getProperty("elasticsearch.version"));
        content.endObject();
        content.endObject();
        channel.sendResponse(new BytesRestResponse(RestStatus.OK, content));
    };
}
 
Example 3
Source File: RestListTasksAction.java    From Elasticsearch with Apache License 2.0
@Override
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
    boolean detailed = request.paramAsBoolean("detailed", false);
    String[] nodesIds = Strings.splitStringByCommaToArray(request.param("node_id"));
    TaskId taskId = new TaskId(request.param("taskId"));
    String[] actions = Strings.splitStringByCommaToArray(request.param("actions"));
    TaskId parentTaskId = new TaskId(request.param("parent_task_id"));
    boolean waitForCompletion = request.paramAsBoolean("wait_for_completion", false);
    TimeValue timeout = request.paramAsTime("timeout", null);

    ListTasksRequest listTasksRequest = new ListTasksRequest();
    listTasksRequest.setTaskId(taskId);
    listTasksRequest.setNodesIds(nodesIds);
    listTasksRequest.setDetailed(detailed);
    listTasksRequest.setActions(actions);
    listTasksRequest.setParentTaskId(parentTaskId);
    listTasksRequest.setWaitForCompletion(waitForCompletion);
    listTasksRequest.setTimeout(timeout);
    client.admin().cluster().listTasks(listTasksRequest, new RestToXContentListener<ListTasksResponse>(channel));
}
 
Example 4
Source File: SetupAction.java    From zentity with Apache License 2.0
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) {

    // Parse request
    Boolean pretty = restRequest.paramAsBoolean("pretty", false);
    int numberOfShards = restRequest.paramAsInt("number_of_shards", 1);
    int numberOfReplicas = restRequest.paramAsInt("number_of_replicas", 1);
    Method method = restRequest.method();

    return channel -> {
        try {
            if (method == POST) {

                createIndex(client, numberOfShards, numberOfReplicas);
                XContentBuilder content = XContentFactory.jsonBuilder();
                if (pretty)
                    content.prettyPrint();
                content.startObject().field("acknowledged", true).endObject();
                channel.sendResponse(new BytesRestResponse(RestStatus.OK, content));

            } else {
                throw new NotImplementedException("Method and endpoint not implemented.");
            }
        } catch (NotImplementedException e) {
            channel.sendResponse(new BytesRestResponse(channel, RestStatus.NOT_IMPLEMENTED, e));
        }
    };
}
 
Example 5
Source File: FetchSourceContext.java    From Elasticsearch with Apache License 2.0
public static FetchSourceContext parseFromRestRequest(RestRequest request) {
    Boolean fetchSource = null;
    String[] source_excludes = null;
    String[] source_includes = null;

    String source = request.param("_source");
    if (source != null) {
        if (Booleans.isExplicitTrue(source)) {
            fetchSource = true;
        } else if (Booleans.isExplicitFalse(source)) {
            fetchSource = false;
        } else {
            source_includes = Strings.splitStringByCommaToArray(source);
        }
    }
    String sIncludes = request.param("_source_includes");
    sIncludes = request.param("_source_include", sIncludes);
    if (sIncludes != null) {
        source_includes = Strings.splitStringByCommaToArray(sIncludes);
    }

    String sExcludes = request.param("_source_excludes");
    sExcludes = request.param("_source_exclude", sExcludes);
    if (sExcludes != null) {
        source_excludes = Strings.splitStringByCommaToArray(sExcludes);
    }

    boolean transform = request.paramAsBoolean("_source_transform", false);

    if (fetchSource != null || source_includes != null || source_excludes != null || transform) {
        return new FetchSourceContext(fetchSource == null ? true : fetchSource, source_includes, source_excludes, transform);
    }
    return null;
}
 
Example 6
Source File: RestAddFeatureToSet.java    From elasticsearch-learning-to-rank with Apache License 2.0
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
    String store = indexName(request);
    String setName = request.param("name");
    String routing = request.param("routing");
    String featureQuery = null;
    List<StoredFeature> features = null;
    boolean merge = request.paramAsBoolean("merge", false);
    if (request.hasParam("query")) {
        featureQuery = request.param("query");
    }
    FeatureValidation validation = null;
    if (request.hasContentOrSourceParam()) {
        FeaturesParserState featuresParser = new FeaturesParserState();
        request.applyContentParser(featuresParser::parse);
        features = featuresParser.features;
        validation = featuresParser.validation;
    }
    if (featureQuery == null && (features == null || features.isEmpty())) {
        throw new IllegalArgumentException("features must be provided as a query for the feature store " +
                "or in the body, none provided");
    }

    if (featureQuery != null && (features != null && !features.isEmpty())) {
        throw new IllegalArgumentException("features must be provided as a query for the feature store " +
                "or directly in the body not both");
    }

    AddFeaturesToSetRequestBuilder builder = new AddFeaturesToSetRequestBuilder(client);
    builder.request().setStore(store);
    builder.request().setFeatureSet(setName);
    builder.request().setFeatureNameQuery(featureQuery);
    builder.request().setRouting(routing);
    builder.request().setFeatures(features);
    builder.request().setMerge(merge);
    builder.request().setValidation(validation);
    return (channel) -> builder.execute(new RestStatusToXContentListener<>(channel, (r) -> r.getResponse().getLocation(routing)));
}
 
Example 7
Source File: XlsContent.java    From elasticsearch-dataformat with Apache License 2.0
public XlsContent(final Client client, final RestRequest request, final ContentType contentType, final boolean isExcel2007) {
    super(client, request, contentType);

    appendHeader = request.paramAsBoolean("append.header", true);
    String fieldsName = "fields_name";
    if (request.hasParam("fl")) {
        fieldsName = "fl";
    }
    final String[] fields = request.paramAsStringArray(fieldsName,
            StringUtils.EMPTY_STRINGS);
    if (fields.length == 0) {
        headerSet = new LinkedHashSet<>();
        modifiableFieldSet = true;
    } else {
        final Set<String> fieldSet = new LinkedHashSet<>();
        for (final String field : fields) {
            fieldSet.add(field.trim());
        }
        headerSet = Collections.unmodifiableSet(fieldSet);
        modifiableFieldSet = false;
    }

    this.isExcel2007 = isExcel2007;

    if (logger.isDebugEnabled()) {
        logger.debug("appendHeader: {}, headerSet: {}, isExcel2007: {}",
                appendHeader, headerSet, isExcel2007);
    }
}
 
Example 8
Source File: KafkaStreamRestHandler.java    From elasticsearch-rest-command with The Unlicense
@Override
protected void handleRequest(RestRequest request, RestChannel channel, Client client)
		throws Exception {
	final String topic = request.param("topic", "");
	final boolean schema = request.paramAsBoolean("schema", false);
	final String master = request.param("masterAddress", "local");
	final String hdfs =  request.param("hdfs", "hdfs://localhost:50070");
	final String memory =  request.param("memory", "2g");
	final String appName = request.param("appName", "appName-"+topic);
	final int duration = request.paramAsInt("duration", 1000);
	
	Thread exec = new Thread(new Runnable(){

		@Override
		public void run() {
		
			SparkConf sparkConf = new SparkConf().setAppName(appName).setMaster(master).set("spark.executor.memory", memory);
			JavaStreamingContext jssc = new JavaStreamingContext(sparkConf, new Duration(duration));
			
			Map<String, Integer> topicMap = new HashMap<String, Integer>();
			topicMap.put(topic, 3);
			
			JavaPairReceiverInputDStream<String, byte[]> kafkaStream = KafkaUtils.createStream(jssc, String.class, byte[].class, 
						kafka.serializer.DefaultDecoder.class, kafka.serializer.DefaultDecoder.class, null, 
						topicMap,  StorageLevel.MEMORY_ONLY());
	
			//JobConf confHadoop = new JobConf();
			//confHadoop.set("mapred.output.compress", "true");
			//confHadoop.set("mapred.output.compression.codec", "com.hadoop.compression.lzo.LzopCodec");
	
			kafkaStream.saveAsHadoopFiles(hdfs, "seq", Text.class, BytesWritable.class, KafkaStreamSeqOutputFormat.class);
			
			topicContextMap.put(topic, jssc);
			jssc.start();		
			jssc.awaitTermination();
			
		}
	});
	
	exec.start();
	
	channel.sendResponse(new BytesRestResponse(RestStatus.OK, String.format("{\"topic\":\"%s\"}",  topic)));
}
 
Example 9
Source File: CsvContent.java    From elasticsearch-dataformat with Apache License 2.0
public CsvContent(final Client client, final RestRequest request, final ContentType contentType) {
    super(client, request, contentType);
    csvConfig = new CsvConfig(
            request.param("csv.separator", ",").charAt(0), request.param(
            "csv.quote", "\"").charAt(0), request.param(
            "csv.escape", "\"").charAt(0));
    csvConfig.setQuoteDisabled(request.paramAsBoolean("csv.quoteDisabled",
            false));
    csvConfig.setEscapeDisabled(request.paramAsBoolean(
            "csv.escapeDisabled", false));
    csvConfig.setNullString(request.param("csv.nullString", ""));
    csvConfig.setIgnoreLeadingWhitespaces(request.paramAsBoolean(
            "csv.ignoreLeadingWhitespaces", true));
    csvConfig.setIgnoreTrailingWhitespaces(request.paramAsBoolean(
            "csv.ignoreTrailingWhitespaces", true));

    appendHeader = request.paramAsBoolean("append.header", true);
    charsetName = request.param("csv.encoding", "UTF-8");

    String fields_name = "fields_name";
    if (request.hasParam("fl")) {
        fields_name = "fl";
    }
    final String[] fields = request.paramAsStringArray(fields_name,
            StringUtils.EMPTY_STRINGS);
    if (fields.length == 0) {
        headerSet = new LinkedHashSet<>();
        modifiableFieldSet = true;
    } else {
        final Set<String> fieldSet = new LinkedHashSet<>();
        for (final String field : fields) {
            fieldSet.add(field.trim());
        }
        headerSet = Collections.unmodifiableSet(fieldSet);
        modifiableFieldSet = false;
    }

    if (logger.isDebugEnabled()) {
        logger.debug("CsvConfig: {}, appendHeader: {}, charsetName: {}, headerSet: {}",
                csvConfig, appendHeader, charsetName, headerSet);
    }
}