Java Code Examples for org.elasticsearch.common.unit.TimeValue#getMillis()

The following examples show how to use org.elasticsearch.common.unit.TimeValue#getMillis(). Each example is taken from the Elasticsearch codebase; the originating source file and license are noted above each snippet.
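Before the project examples, here is a minimal, self-contained sketch of the API in isolation. The setting name "example.timeout" is only a placeholder for illustration, not part of any real configuration:

import org.elasticsearch.common.unit.TimeValue;

// TimeValue pairs a duration with a unit; getMillis() returns that duration in milliseconds.
TimeValue thirtySeconds = TimeValue.timeValueSeconds(30);
long millis = thirtySeconds.getMillis();   // 30000

// Parse a duration string such as "5m"; the second argument is the default used when the
// input string is null, and the third names the setting for error reporting.
TimeValue parsed = TimeValue.parseTimeValue("5m", TimeValue.timeValueMinutes(1), "example.timeout");
long parsedMillis = parsed.getMillis();    // 300000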
Example 1
Source File: DecayFunctionParser.java    From Elasticsearch with Apache License 2.0
private AbstractDistanceScoreFunction parseDateVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
        DateFieldMapper.DateFieldType dateFieldType, MultiValueMode mode) throws IOException {
    XContentParser.Token token;
    String parameterName = null;
    String scaleString = null;
    String originString = null;
    String offsetString = "0d";
    double decay = 0.5;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            parameterName = parser.currentName();
        } else if (parameterName.equals(DecayFunctionBuilder.SCALE)) {
            scaleString = parser.text();
        } else if (parameterName.equals(DecayFunctionBuilder.ORIGIN)) {
            originString = parser.text();
        } else if (parameterName.equals(DecayFunctionBuilder.DECAY)) {
            decay = parser.doubleValue();
        } else if (parameterName.equals(DecayFunctionBuilder.OFFSET)) {
            offsetString = parser.text();
        } else {
            throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
        }
    }
    long origin = SearchContext.current().nowInMillis();
    if (originString != null) {
        origin = dateFieldType.parseToMilliseconds(originString, false, null, null);
    }

    if (scaleString == null) {
        throw new ElasticsearchParseException("[{}] must be set for date fields.", DecayFunctionBuilder.SCALE);
    }
    // Both scale and offset are duration strings; getMillis() converts each parsed
    // TimeValue into the millisecond value the numeric score function works with.
    TimeValue val = TimeValue.parseTimeValue(scaleString, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale");
    double scale = val.getMillis();
    val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset");
    double offset = val.getMillis();
    IndexNumericFieldData numericFieldData = parseContext.getForField(dateFieldType);
    return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
}
 
Example 2
Source File: InternalClusterInfoService.java    From Elasticsearch with Apache License 2.0
@Override
public void onRefreshSettings(Settings settings) {
    TimeValue newUpdateFrequency = settings.getAsTime(
            INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL,
            InternalClusterInfoService.this.settings.getAsTime(
                    INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, DEFAULT_UPDATE_INTERVAL));
    // ClusterInfoService is only enabled if the DiskThresholdDecider is enabled
    Boolean newEnabled = settings.getAsBoolean(
            DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED,
            DiskThresholdDecider.DEFAULT_THRESHOLD_ENABLED);

    if (newUpdateFrequency != null && !updateFrequency.equals(newUpdateFrequency)) {
        // getMillis() puts both values in milliseconds; update intervals below 10s are rejected.
        if (newUpdateFrequency.getMillis() < TimeValue.timeValueSeconds(10).getMillis()) {
            logger.warn("[{}] set too low [{}] (< 10s)", INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, newUpdateFrequency);
            throw new IllegalStateException("Unable to set " + INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL + " less than 10 seconds");
        } else {
            logger.info("updating [{}] from [{}] to [{}]", INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, updateFrequency, newUpdateFrequency);
            InternalClusterInfoService.this.updateFrequency = newUpdateFrequency;
        }
    }

    TimeValue newFetchTimeout = settings.getAsTime(INTERNAL_CLUSTER_INFO_TIMEOUT,
            InternalClusterInfoService.this.settings.getAsTime(INTERNAL_CLUSTER_INFO_TIMEOUT, DEFAULT_TIMEOUT)
    );
    if (newFetchTimeout != null && !fetchTimeout.equals(newFetchTimeout)) {
        logger.info("updating fetch timeout [{}] from [{}] to [{}]", INTERNAL_CLUSTER_INFO_TIMEOUT, fetchTimeout, newFetchTimeout);
        InternalClusterInfoService.this.fetchTimeout = newFetchTimeout;
    }

    // We don't log about enabling it here, because the DiskThresholdDecider will already be logging about enable/disable
    if (newEnabled != InternalClusterInfoService.this.enabled) {
        InternalClusterInfoService.this.enabled = newEnabled;
    }
}
 
Example 3
Source File: SettingsAppliers.java    From Elasticsearch with Apache License 2.0
private TimeValue validate(TimeValue value) {
    // Compare in milliseconds to enforce the setting's configured minimum and maximum.
    if (value.getMillis() < setting.minValue().getMillis() || value.getMillis() > setting.maxValue().getMillis()) {
        throw invalidException();
    }
    return value;
}
 
Example 4
Source File: DerivativeParser.java    From Elasticsearch with Apache License 2.0
@Override
public PipelineAggregatorFactory parse(String pipelineAggregatorName, XContentParser parser, SearchContext context) throws IOException {
    XContentParser.Token token;
    String currentFieldName = null;
    String[] bucketsPaths = null;
    String format = null;
    String units = null;
    GapPolicy gapPolicy = GapPolicy.SKIP;

    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.VALUE_STRING) {
            if (context.parseFieldMatcher().match(currentFieldName, FORMAT)) {
                format = parser.text();
            } else if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
                bucketsPaths = new String[] { parser.text() };
            } else if (context.parseFieldMatcher().match(currentFieldName, GAP_POLICY)) {
                gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
            } else if (context.parseFieldMatcher().match(currentFieldName, UNIT)) {
                units = parser.text();
            } else {
                throw new SearchParseException(context, "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: ["
                        + currentFieldName + "].", parser.getTokenLocation());
            }
        } else if (token == XContentParser.Token.START_ARRAY) {
            if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
                List<String> paths = new ArrayList<>();
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    String path = parser.text();
                    paths.add(path);
                }
                bucketsPaths = paths.toArray(new String[paths.size()]);
            } else {
                throw new SearchParseException(context, "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: ["
                        + currentFieldName + "].", parser.getTokenLocation());
            }
        } else {
            throw new SearchParseException(context, "Unexpected token " + token + " in [" + pipelineAggregatorName + "].",
                    parser.getTokenLocation());
        }
    }

    if (bucketsPaths == null) {
        throw new SearchParseException(context, "Missing required field [" + BUCKETS_PATH.getPreferredName()
                + "] for derivative aggregation [" + pipelineAggregatorName + "]", parser.getTokenLocation());
    }

    ValueFormatter formatter = null;
    if (format != null) {
        formatter = ValueFormat.Patternable.Number.format(format).formatter();
    } else {
        formatter = ValueFormatter.RAW;
    }

    Long xAxisUnits = null;
    if (units != null) {
        // Try a calendar unit (e.g. "day") first; otherwise parse the string as a
        // TimeValue and use its length in milliseconds as the x-axis unit.
        DateTimeUnit dateTimeUnit = DateHistogramParser.DATE_FIELD_UNITS.get(units);
        if (dateTimeUnit != null) {
            xAxisUnits = dateTimeUnit.field().getDurationField().getUnitMillis();
        } else {
            TimeValue timeValue = TimeValue.parseTimeValue(units, null, getClass().getSimpleName() + ".unit");
            if (timeValue != null) {
                xAxisUnits = timeValue.getMillis();
            }
        }
    }

    return new DerivativePipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, formatter, gapPolicy, xAxisUnits);
}
 
Example 5
Source File: InternalClusterService.java    From Elasticsearch with Apache License 2.0
private void warnAboutSlowTaskIfNeeded(TimeValue executionTime, String source) {
    // Warn when a cluster state update task ran longer than the configured slow-task threshold.
    if (executionTime.getMillis() > slowTaskLoggingThreshold.getMillis()) {
        logger.warn("cluster state update task [{}] took {} above the warn threshold of {}", source, executionTime, slowTaskLoggingThreshold);
    }
}