org.elasticsearch.common.logging.ESLoggerFactory Java Examples

The following examples show how to use org.elasticsearch.common.logging.ESLoggerFactory. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: ScriptHeuristic.java    From Elasticsearch with Apache License 2.0 6 votes vote down vote up
/**
 * Calculates score with a script
 *
 * @param subsetFreq   The frequency of the term in the selected sample
 * @param subsetSize   The size of the selected sample (typically number of docs)
 * @param supersetFreq The frequency of the term in the superset from which the sample was taken
 * @param supersetSize The size of the superset from which the sample was taken  (typically number of docs)
 * @return a "significance" score
 */
@Override
public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long supersetSize) {
    if (searchScript == null) {
        //In tests, when calling assertSearchResponse(..) the response is streamed one additional time with an arbitrary version, see assertVersionSerializable(..).
        // Now, for versions before 1.5.0 the score is computed after streaming the response but for scripts the script does not exist yet.
        // assertSearchResponse() might therefore fail although there is no problem.
        // This should be replaced by an exception in 2.0.
        ESLoggerFactory.getLogger("script heuristic").warn("cannot compute score - script has not been initialized yet.");
        return 0;
    }
    // Publish the four counters through mutable holder objects; the script reads
    // them as bound variables rather than receiving them as arguments.
    subsetSizeHolder.value = subsetSize;
    supersetSizeHolder.value = supersetSize;
    subsetDfHolder.value = subsetFreq;
    supersetDfHolder.value = supersetFreq;
    // The script must evaluate to a numeric value; coerce whatever Number it returns.
    return ((Number) searchScript.run()).doubleValue();
}
 
Example #2
Source File: WebRestApiApplication.java    From metasfresh-webui-api-legacy with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Application entry point: configures logging/properties, boots the
 * metasfresh backend (ADempiere) and then starts the Spring Boot web layer.
 * <p>
 * NOTE: statement order matters here — the logger factory must be swapped
 * before anything logs, and model-validator init is postponed until after
 * the Spring application has been started.
 *
 * @param args command-line arguments, passed through to Spring Boot
 */
public static void main(final String[] args)
{
	// Default the configuration file location if the caller did not set one.
	if (Check.isEmpty(System.getProperty("PropertyFile"), true))
	{
		System.setProperty("PropertyFile", "./metasfresh.properties");
	}

	// Make sure slf4j is used (by default, in v2.4.4 log4j is used, see https://github.com/metasfresh/metasfresh-webui-api/issues/757)
	ESLoggerFactory.setDefaultFactory(new Slf4jESLoggerFactory());

	// Postpone model-validation init; the closeable re-enables it when this try block exits.
	try (final IAutoCloseable c = ModelValidationEngine.postponeInit())
	{
		Ini.setRunMode(RunMode.WEBUI);
		Adempiere.instance.startup(RunMode.WEBUI);

		// Profiles configured in SysConfig, plus the mandatory web-UI profile.
		final ArrayList<String> activeProfiles = retrieveActiveProfilesFromSysConfig();
		activeProfiles.add(Profiles.PROFILE_Webui);

		// Headless defaults to true; can be overridden via system property (see comment below).
		final String headless = System.getProperty(SYSTEM_PROPERTY_HEADLESS, Boolean.toString(true));

		new SpringApplicationBuilder(WebRestApiApplication.class)
				.headless(Boolean.parseBoolean(headless)) // we need headless=false for initial connection setup popup (if any), usually this only applies on dev workstations.
				.web(true)
				.profiles(activeProfiles.toArray(new String[0]))
				.beanNameGenerator(new MetasfreshBeanNameGenerator())
				.run(args);
	}

	// now init the model validation engine
	ModelValidationEngine.get();
}
 
Example #3
Source File: LdapLoginSource.java    From elasticsearch-imap with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a login source backed by an LDAP directory.
 * <p>
 * Reads connection parameters from {@code settings}, builds the LDAP
 * connector, and — if a positive refresh interval is configured — starts a
 * daemon thread that periodically refreshes the cached credentials.
 * <p>
 * NOTE(review): the constructor publishes {@code this} to a new thread before
 * construction completes ("this-escape"); safe only because the thread
 * synchronizes on {@code fLock}/{@code fInitialized} — confirm.
 *
 * @param settings       raw plugin settings map ({@code ldap_url}, {@code ldap_base}, ...)
 * @param masterUser     user name that is always accepted in addition to LDAP entries
 * @param masterPassword password for the master user
 */
public LdapLoginSource(Map<String, Object> settings, String masterUser, String masterPassword) {
fUserNames = new ArrayList<>();
fUserPasswords = new ArrayList<>();

fMasterUser = masterUser;
fMasterPassword = masterPassword;

// Connection parameters; all default to null — presumably validated by SimpleLdapConnector.
String url = XContentMapValues.nodeStringValue(settings.get("ldap_url"), null);
String base = XContentMapValues.nodeStringValue(settings.get("ldap_base"), null);
String user = XContentMapValues.nodeStringValue(settings.get("ldap_user"), null);
String password = XContentMapValues.nodeStringValue(settings.get("ldap_password"), null);

fConnector = new SimpleLdapConnector(url, base, user, password, true);

// Attribute names used to extract user name/password from LDAP entries.
fNameField = XContentMapValues.nodeStringValue(settings.get("ldap_name_field"), DEF_USER_NAME_FIELD);
fPasswordField = XContentMapValues.nodeStringValue(settings.get("ldap_password_field"), DEF_USER_PW_FIELD);
fLdapFilter = fNameField + "=*";

fLogger = ESLoggerFactory.getLogger(LdapLoginSource.class.getName());

fLock = new Object();
fInitialized = false;

//start refreshing thread once initialized; interval in minutes
// NOTE(review): Long.parseLong throws NumberFormatException on a malformed
// ldap_refresh_interval setting — no validation here.
String refreshParam = XContentMapValues.nodeStringValue(settings.get("ldap_refresh_interval"), DEF_REFRESH_INT);
fRefreshInt = Long.parseLong(refreshParam) * 60000L;
if(fRefreshInt > 0) {
    //TODO: actually stop refreshing thread somehow
    fActive = true;
    Thread t = new Thread(this);
    t.setDaemon(true);
    t.start();
}
    }
 
Example #4
Source File: TypeParsers.java    From Elasticsearch with Apache License 2.0 4 votes vote down vote up
/**
 * Parse common field attributes such as {@code doc_values} or {@code store}.
 * <p>
 * Iterates over {@code fieldNode} and consumes (removes via
 * {@code iterator.remove()}) every entry it recognizes, applying it to
 * {@code builder}. Entries left in the map are later rejected by the caller,
 * so the removals are part of the contract, not cleanup. Several options are
 * gated on the version the index was created with.
 *
 * @param builder       field-mapper builder that receives the parsed attributes
 * @param name          field name, used in error and log messages
 * @param fieldNode     raw mapping for this field; recognized keys are removed
 * @param parserContext parser context (index-created version, similarity lookup, ...)
 */
public static void parseField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
    Version indexVersionCreated = parserContext.indexVersionCreated();
    for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
        Map.Entry<String, Object> entry = iterator.next();
        // Normalize camelCase keys to snake_case so both spellings are accepted.
        final String propName = Strings.toUnderscoreCase(entry.getKey());
        final Object propNode = entry.getValue();
        if (propName.equals("index_name") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
            builder.indexName(propNode.toString());
            iterator.remove();
        } else if (propName.equals("store")) {
            builder.store(parseStore(name, propNode.toString()));
            iterator.remove();
        } else if (propName.equals("index")) {
            parseIndex(name, propNode.toString(), builder);
            iterator.remove();
        } else if (propName.equals(DOC_VALUES)) {
            builder.docValues(nodeBooleanValue(propNode));
            iterator.remove();
        } else if (propName.equals("boost")) {
            builder.boost(nodeFloatValue(propNode));
            iterator.remove();
        } else if (propName.equals("omit_norms")) {
            builder.omitNorms(nodeBooleanValue(propNode));
            iterator.remove();
        } else if (propName.equals("norms")) {
            // "norms" is itself a sub-map; consume its known keys the same way,
            // then verify nothing unknown is left inside it.
            final Map<String, Object> properties = nodeMapValue(propNode, "norms");
            for (Iterator<Entry<String, Object>> propsIterator = properties.entrySet().iterator(); propsIterator.hasNext();) {
                Entry<String, Object> entry2 = propsIterator.next();
                final String propName2 = Strings.toUnderscoreCase(entry2.getKey());
                final Object propNode2 = entry2.getValue();
                if (propName2.equals("enabled")) {
                    // "norms.enabled: true" means do NOT omit norms, hence the negation.
                    builder.omitNorms(!nodeBooleanValue(propNode2));
                    propsIterator.remove();
                } else if (propName2.equals(Loading.KEY)) {
                    builder.normsLoading(Loading.parse(nodeStringValue(propNode2, null), null));
                    propsIterator.remove();
                }
            }
            DocumentMapperParser.checkNoRemainingFields(propName, properties, parserContext.indexVersionCreated());
            iterator.remove();
        } else if (propName.equals("omit_term_freq_and_positions")) {
            final IndexOptions op = nodeBooleanValue(propNode) ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
            if (indexVersionCreated.onOrAfter(Version.V_1_0_0_RC2)) {
                throw new ElasticsearchParseException("'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : 'docs']  instead");
            }
            // deprecated option for BW compat
            builder.indexOptions(op);
            iterator.remove();
        } else if (propName.equals("index_options")) {
            builder.indexOptions(nodeIndexOptionValue(propNode));
            iterator.remove();
        } else if (propName.equals("include_in_all")) {
            builder.includeInAll(nodeBooleanValue(propNode));
            iterator.remove();
        } else if (propName.equals("postings_format") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
            // ignore for old indexes
            iterator.remove();
        } else if (propName.equals("doc_values_format") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
            // ignore for old indexes
            iterator.remove();
        } else if (propName.equals("similarity")) {
            builder.similarity(parserContext.similarityLookupService().similarity(propNode.toString()));
            iterator.remove();
        } else if (propName.equals("fielddata")) {
            final Settings settings = Settings.builder().put(SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(propNode, "fielddata"))).build();
            builder.fieldDataSettings(settings);
            iterator.remove();
        } else if (propName.equals("copy_to")) {
            if (parserContext.isWithinMultiField()) {
                // copy_to inside a multi-field: hard error on 2.0.2/2.1.1+, logged warning on older versions.
                if (indexVersionCreated.after(Version.V_2_1_0) ||
                    (indexVersionCreated.after(Version.V_2_0_1) && indexVersionCreated.before(Version.V_2_1_0))) {
                    throw new MapperParsingException("copy_to in multi fields is not allowed. Found the copy_to in field [" + name + "] which is within a multi field.");
                } else {
                    ESLoggerFactory.getLogger("mapping [" + parserContext.type() + "]").warn("Found a copy_to in field [" + name + "] which is within a multi field. This feature has been removed and the copy_to will be ignored.");
                    // we still parse this, otherwise the message will only appear once and the copy_to removed. After that it will appear again. Better to have it always.
                }
            }
            parseCopyFields(propNode, builder);
            iterator.remove();
        }
    }
    if (indexVersionCreated.before(Version.V_2_2_0)) {
        // analyzer, search_analyzer, term_vectors were accepted on all fields
        // before 2.2, even though it made little sense
        parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext);
    }
}
 
Example #5
Source File: RestLoggerFilter.java    From es-restlog with Apache License 2.0 4 votes vote down vote up
public RestLoggerFilter(Settings settings) {
  log = ESLoggerFactory.getLogger(settings.get("restlog.category", "restlog"));
  pathFilter = pathFilter(settings.get("restlog.path_regex", ""));
  contentEncoder = encoder(settings.get("restlog.content_encoding", "json"));
  joiner = Joiner.on(" ").useForNull(settings.get("restlog.null_value", "-"));
}