Java Code Examples for java.util.Properties#putIfAbsent()
The following examples show how to use java.util.Properties#putIfAbsent(). These examples are extracted from open source projects.
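Before the project examples, here is a minimal sketch of the method's contract (the class PutIfAbsentDemo is ours, purely illustrative): putIfAbsent(key, value) adds the mapping only when the key is absent and returns the previous value, or null if there was none. Properties inherits the method from Hashtable<Object, Object>, so it accepts arbitrary Objects, not just Strings.

import java.util.Properties;

public class PutIfAbsentDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("server.port", "8080");

        // Key already present: the existing mapping is kept and its value returned.
        Object previous = props.putIfAbsent("server.port", "9090");
        System.out.println(previous);                          // 8080
        System.out.println(props.getProperty("server.port"));  // 8080

        // Key absent: the mapping is added and null is returned.
        Object added = props.putIfAbsent("server.host", "localhost");
        System.out.println(added);                             // null
        System.out.println(props.getProperty("server.host"));  // localhost
    }
}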
Example 1
Source Project: apicurio-registry File: KafkaClients.java License: Apache License 2.0
public static Producer<Object, ?> createProducer(Properties props, String keySerializer, String valueSerializer,
        String topicName, String artifactIdStrategy) {
    props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers());
    props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + topicName);
    props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all");
    props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, keySerializer);
    props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueSerializer);
    // Schema Registry location.
    if (valueSerializer.contains("confluent")) {
        props.putIfAbsent(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, TestUtils.getRegistryUrl() + "/ccompat");
        props.putIfAbsent(AbstractKafkaAvroSerDeConfig.AUTO_REGISTER_SCHEMAS, "false");
        props.putIfAbsent(KafkaAvroSerializerConfig.VALUE_SUBJECT_NAME_STRATEGY, artifactIdStrategy);
    } else {
        props.putIfAbsent(AbstractKafkaSerDe.REGISTRY_URL_CONFIG_PARAM, TestUtils.getRegistryUrl());
        props.putIfAbsent(AbstractKafkaSerializer.REGISTRY_ARTIFACT_ID_STRATEGY_CONFIG_PARAM, artifactIdStrategy);
    }
    return new KafkaProducer<>(props);
}
Example 2
Source Project: apicurio-registry File: KafkaClients.java License: Apache License 2.0
public static Consumer<Long, ?> createConsumer(Properties props, String keyDeserializer, String valueDeserializer,
        String topicName) {
    props.putIfAbsent(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers());
    props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + topicName);
    props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
    props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer);
    // Use Kafka Avro Deserializer.
    props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer);
    // Schema Registry location.
    if (valueDeserializer.contains("confluent")) {
        props.putIfAbsent(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, TestUtils.getRegistryUrl() + "/ccompat");
    } else {
        props.putIfAbsent(AbstractKafkaSerDe.REGISTRY_URL_CONFIG_PARAM, TestUtils.getRegistryUrl());
    }
    return new KafkaConsumer<>(props);
}
Example 3
Source Project: constellation File: ConstellationLoggerHelper.java License: Apache License 2.0
/**
 * ### THIS IS TEMPORARY ###
 *
 * @param plugin The plugin run
 * @param terms The collection of terms being searched
 * @param status The status of the operation, which could be a SUCCESS or FAILURE
 * @param resultCount The count of results returned
 * @param datasource The name of the data source
 * @param description A description field which, for example, could be used to capture the error message during a failure
 * @param customProperties Any extra properties that should be added
 * @return Enriched properties
 */
public static Properties searchPropertyBuilder(final Plugin plugin, final Collection<String> terms,
        final String status, final long resultCount, final String datasource, final String description,
        final Properties customProperties) {
    final Properties properties = new Properties();
    properties.setProperty(PLUGIN_TYPE, PluginType.SEARCH.toString());
    properties.setProperty(STATUS, status);
    properties.setProperty(TERMS, terms.toString());
    properties.setProperty(COUNT, Long.toString(resultCount));
    properties.setProperty(DATASOURCE, datasource);
    properties.setProperty(DESCRIPTION, description == null ? "" : description);
    if (customProperties != null) {
        for (Entry<Object, Object> entry : customProperties.entrySet()) {
            properties.putIfAbsent(entry.getKey(), entry.getValue());
        }
    }
    ConstellationLogger.getDefault().pluginProperties(plugin, properties);
    return properties;
}
Example 4
Source Project: quarkus File: InfinispanClientProcessor.java License: Apache License 2.0
@BuildStep
@Record(ExecutionTime.STATIC_INIT)
BeanContainerListenerBuildItem build(InfinispanRecorder recorder, InfinispanPropertiesBuildItem builderBuildItem) {
    Properties properties = builderBuildItem.getProperties();
    InfinispanClientBuildTimeConfig conf = infinispanClient;
    if (log.isDebugEnabled()) {
        log.debugf("Applying micro profile configuration: %s", conf);
    }
    int maxEntries = conf.nearCacheMaxEntries;
    // Only write the entries if it is a valid number and it isn't already configured
    if (maxEntries > 0 && !properties.containsKey(ConfigurationProperties.NEAR_CACHE_MODE)) {
        // This is already empty so no need for putIfAbsent
        properties.put(ConfigurationProperties.NEAR_CACHE_MODE, NearCacheMode.INVALIDATED.toString());
        properties.putIfAbsent(ConfigurationProperties.NEAR_CACHE_MAX_ENTRIES, maxEntries);
    }
    return new BeanContainerListenerBuildItem(recorder.configureInfinispan(properties));
}
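A detail worth noting in Example 4: maxEntries is an int, and putIfAbsent stores it unchanged because Properties extends Hashtable<Object, Object>. Properties.getProperty(String), however, only returns values that are actual Strings, so such entries are reachable via get but invisible to getProperty. A minimal sketch of that behavior (the key name is ours, purely illustrative):

import java.util.Properties;

public class NonStringValueDemo {
    public static void main(String[] args) {
        Properties p = new Properties();
        p.put("near-cache-max-entries", 100);                        // stores an Integer, not a String
        System.out.println(p.get("near-cache-max-entries"));         // 100
        System.out.println(p.getProperty("near-cache-max-entries")); // null: getProperty only returns Strings
    }
}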
Example 5
Source Project: brooklin File: KafkaConnectorTask.java License: BSD 2-Clause "Simplified" License
@VisibleForTesting
static Properties getKafkaConsumerProperties(Properties consumerProps, String groupId,
        KafkaConnectionString connectionString) {
    StringJoiner csv = new StringJoiner(",");
    connectionString.getBrokers().forEach(broker -> csv.add(broker.toString()));
    String bootstrapValue = csv.toString();

    Properties props = new Properties();
    props.putAll(consumerProps);
    props.putIfAbsent(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapValue);
    props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); // auto-commits are unsafe
    props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "none");
    props.putIfAbsent(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG,
            connectionString.isSecure() ? "SSL" : "PLAINTEXT");
    return props;
}
Example 6
Source Project: brooklin File: LiKafkaProducerFactory.java License: BSD 2-Clause "Simplified" License
static Properties buildProducerProperties(Properties prop, String clientId, String brokers, String enableLargeMessage) {
    prop.put(ProducerConfig.CLIENT_ID_CONFIG, clientId);
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
    prop.put(LiKafkaProducerConfig.LARGE_MESSAGE_ENABLED_CONFIG, enableLargeMessage);
    prop.putIfAbsent(CFG_RETRY_BACKOFF_MS, DEFAULT_RETRY_BACKOFF_MS);
    prop.putIfAbsent(CFG_REQUEST_TIMEOUT_MS, DEFAULT_REQUEST_TIMEOUT_MS);
    prop.putIfAbsent(CFG_METADATA_EXPIRY_MS, DEFAULT_METADATA_EXPIRY_MS);
    prop.putIfAbsent(CFG_MAX_PARTITION_BYTES, DEFAULT_MAX_PARTITION_BYTES);
    prop.putIfAbsent(CFG_TOTAL_MEMORY_BYTES, DEFAULT_TOTAL_MEMORY_BYTES);
    prop.putIfAbsent(CFG_REQUEST_REQUIRED_ACKS, DEFAULT_REQUEST_REQUIRED_ACKS);
    prop.putIfAbsent(CFG_LINGER_MS, DEFAULT_LINGER_MS);
    prop.putIfAbsent(CFG_SEND_BUFFER_BYTES, DEFAULT_SEND_BUFFER_BYTES);
    prop.putIfAbsent(CFG_RECEIVE_BUFFER_BYTES, DEFAULT_RECEIVE_BUFFER_BYTES);
    prop.putIfAbsent(CFG_MAX_REQUEST_SIZE, DEFAULT_MAX_REQUEST_SIZE);
    prop.putIfAbsent(CFG_RECONNECT_BACKOFF_MS, DEFAULT_RECONNECT_BACKOFF_MS);
    prop.putIfAbsent(CFG_MAX_BLOCK_MS, DEFAULT_MAX_BLOCK_MS);
    prop.putIfAbsent(CFG_MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, DEFAULT_MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION);
    prop.putIfAbsent(CFG_REQUEST_RETRIES, DEFAULT_REQUEST_RETRIES);
    prop.putIfAbsent(CFG_COMPRESSION_TYPE, DEFAULT_COMPRESSION_TYPE);
    return prop;
}
Example 7
Source Project: brooklin File: LiKafkaProducerFactory.java License: BSD 2-Clause "Simplified" License
@Override
public Producer<byte[], byte[]> createProducer(Properties transportProps) {
    VerifiableProperties transportProviderProperties = new VerifiableProperties(transportProps);
    String clientId = transportProviderProperties.getString(ProducerConfig.CLIENT_ID_CONFIG);
    String bootstrapServers = transportProviderProperties.getString(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
    Properties producerConfig = transportProviderProperties.getDomainProperties(DOMAIN_PRODUCER);

    Validate.notEmpty(clientId, "clientId cannot be empty.");
    Validate.notEmpty(bootstrapServers, "bootstrapServers cannot be empty.");

    producerConfig = buildProducerProperties(producerConfig, clientId, bootstrapServers,
            DEFAULT_ENABLE_LARGE_MESSAGE);

    // Default serializers for key and payload
    producerConfig.putIfAbsent(LiKafkaProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
            ByteArraySerializer.class.getCanonicalName());
    producerConfig.putIfAbsent(LiKafkaProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
            ByteArraySerializer.class.getCanonicalName());

    return new LiKafkaProducerImpl<>(producerConfig);
}
Example 8
Source Project: brooklin File: KafkaTransportProviderAdmin.java License: BSD 2-Clause "Simplified" License
@Override
public TransportProvider assignTransportProvider(DatastreamTask task) {
    Validate.notNull(task, "null task");
    if (!_transportProviders.containsKey(task)) {
        String connectorType = task.getConnectorType();
        String destinationBrokers = getDestinationBrokers(task.getDatastreams().get(0));
        if (!_kafkaProducers.containsKey(connectorType)
                || !_kafkaProducers.get(connectorType).containsKey(destinationBrokers)) {
            initializeKafkaProducersForConnectorDestination(connectorType, destinationBrokers);
        }

        List<KafkaProducerWrapper<byte[], byte[]>> producers =
                getNextKafkaProducers(connectorType, destinationBrokers, numProducersPerTask(task));
        Properties transportProviderProperties = new Properties();
        transportProviderProperties.putAll(_transportProviderProperties);
        transportProviderProperties.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, destinationBrokers);
        _transportProviders.put(task, new KafkaTransportProvider(task, producers, transportProviderProperties,
                _transportProviderMetricsNamesPrefix));
        producers.forEach(p -> p.assignTask(task));
    } else {
        LOG.warn("Trying to assign transport provider to task {} which is already assigned.", task);
    }
    return _transportProviders.get(task);
}
Example 9
Source Project: micronaut-kafka File: AbstractKafkaStreamsConfiguration.java License: Apache License 2.0
/**
 * Shared initialization.
 *
 * @param applicationConfiguration The application config
 * @param environment The env
 * @param config The config to be initialized
 */
protected void init(ApplicationConfiguration applicationConfiguration, Environment environment, Properties config) {
    // set the default application id
    String applicationName = applicationConfiguration.getName().orElse(Environment.DEFAULT_NAME);
    config.putIfAbsent(StreamsConfig.APPLICATION_ID_CONFIG, applicationName);
    if (environment.getActiveNames().contains(Environment.TEST)) {
        String tmpDir = System.getProperty("java.io.tmpdir");
        if (StringUtils.isNotEmpty(tmpDir)) {
            if (new File(tmpDir, applicationName).mkdirs()) {
                config.putIfAbsent(StreamsConfig.STATE_DIR_CONFIG, tmpDir);
            }
        }
    }
}
Example 10
Source Project: DashboardFx File: App.java License: GNU General Public License v3.0
@SuppressWarnings("ResultOfMethodCallIgnored") private String logged(){ try { File file = new File("dashboard.properties"); Properties properties = new Properties(); if(!file.exists()){ file.createNewFile(); return "account"; } else { FileInputStream fileInputStream = new FileInputStream(file); properties.load(fileInputStream); properties.putIfAbsent("logged", "false"); FileOutputStream fileOutputStream = new FileOutputStream(file); properties.store(fileOutputStream, "Dashboard properties"); File directory = new File("user/"); properties.load(fileInputStream); if(directory.exists()){ if(properties.getProperty("logged").equals("false")) return "login"; else return "main"; } else return "account"; } } catch (IOException e) { e.printStackTrace(); } return null; }
Example 11
Source Project: quarkus File: QuarkusTask.java License: Apache License 2.0
protected Properties getBuildSystemProperties(AppArtifact appArtifact) {
    final Map<String, ?> properties = getProject().getProperties();
    final Properties realProperties = new Properties();
    for (Map.Entry<String, ?> entry : properties.entrySet()) {
        final String key = entry.getKey();
        final Object value = entry.getValue();
        if (key != null && value instanceof String && key.startsWith("quarkus.")) {
            realProperties.setProperty(key, (String) value);
        }
    }
    realProperties.putIfAbsent("quarkus.application.name", appArtifact.getArtifactId());
    realProperties.putIfAbsent("quarkus.application.version", appArtifact.getVersion());
    return realProperties;
}
Example 12
Source Project: hawkular-alerts File: EmailPlugin.java License: Apache License 2.0
private void initMailSession(ActionMessage msg) {
    boolean offLine = System.getProperty(MAIL_SESSION_OFFLINE) != null;
    if (!offLine) {
        Properties emailProperties = new Properties();
        msg.getAction().getProperties().entrySet().stream()
                .filter(e -> e.getKey().startsWith("mail."))
                .forEach(e -> emailProperties.put(e.getKey(), e.getValue()));
        Properties systemProperties = System.getProperties();
        for (String property : systemProperties.stringPropertyNames()) {
            if (property.startsWith("mail.")) {
                emailProperties.putIfAbsent(property, System.getProperty(property));
            }
        }
        emailProperties.putIfAbsent("mail.smtp.host", DEFAULT_MAIL_SMTP_HOST);
        emailProperties.putIfAbsent("mail.smtp.port", DEFAULT_MAIL_SMTP_PORT);
        if (emailProperties.containsKey("mail.smtp.user") && emailProperties.containsKey("mail.smtp.pass")) {
            String user = emailProperties.getProperty("mail.smtp.user");
            String password = emailProperties.getProperty("mail.smtp.pass");
            mailSession = Session.getInstance(emailProperties, new Authenticator() {
                @Override
                protected PasswordAuthentication getPasswordAuthentication() {
                    return new PasswordAuthentication(user, password);
                }
            });
        } else {
            mailSession = Session.getInstance(emailProperties);
        }
    }
}
Example 13
Source Project: spring-batch-lightmin File: AddressMigratorApp.java License: Apache License 2.0
public static void main(final String[] args) {
    final SpringApplication application = new SpringApplication(AddressMigratorApp.class);
    final Properties properties = new Properties();
    properties.putIfAbsent("server.port", 9009);
    application.setDefaultProperties(properties);
    application.run(args);
}
Example 14
Source Project: brooklin File: KafkaConsumerFactoryImpl.java License: BSD 2-Clause "Simplified" License
static Properties addConsumerDefaultProperties(Properties properties) {
    properties.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, DEFAULT_KEY_DESERIALIZER);
    properties.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, DEFAULT_VAL_DESERIALIZER);
    return properties;
}
Example 15
Source Project: brooklin File: KafkaTransportProviderAdmin.java License: BSD 2-Clause "Simplified" License
private void initializeKafkaProducersForConnectorDestination(String connectorType, String destinationBrokers) {
    Properties transportProviderProperties = new Properties();
    transportProviderProperties.putAll(_transportProviderProperties);
    transportProviderProperties.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, destinationBrokers);

    List<KafkaProducerWrapper<byte[], byte[]>> producers = IntStream.range(0, _numProducersPerConnector)
            .mapToObj(x -> new KafkaProducerWrapper<byte[], byte[]>(String.format("%s:%s", connectorType, x),
                    transportProviderProperties, _transportProviderMetricsNamesPrefix))
            .collect(Collectors.toList());

    _kafkaProducers.putIfAbsent(connectorType, new HashMap<>());
    _kafkaProducers.get(connectorType).putIfAbsent(destinationBrokers, new ArrayList<>());
    _kafkaProducers.get(connectorType).get(destinationBrokers).addAll(producers);
}
Example 16
Source Project: spring-batch-lightmin File: AddressMigratorApp2.java License: Apache License 2.0
public static void main(final String[] args) {
    final SpringApplication application = new SpringApplication(AddressMigratorApp2.class);
    final Properties properties = new Properties();
    properties.putIfAbsent("server.port", 9010);
    application.setDefaultProperties(properties);
    application.run(args);
}
Example 17
Source Project: Flink-CEPplus File: KinesisExample.java License: Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    String inputStream = parameterTool.getRequired("input-stream");
    String outputStream = parameterTool.getRequired("output-stream");

    FlinkKinesisConsumer<KafkaEvent> consumer = new FlinkKinesisConsumer<>(
            inputStream,
            new KafkaEventSchema(),
            parameterTool.getProperties());
    consumer.setPeriodicWatermarkAssigner(new CustomWatermarkExtractor());

    Properties producerProperties = new Properties(parameterTool.getProperties());
    // producer needs region even when URL is specified
    producerProperties.putIfAbsent(ConsumerConfigConstants.AWS_REGION, "us-east-1");
    // test driver does not deaggregate
    producerProperties.putIfAbsent("AggregationEnabled", String.valueOf(false));

    // KPL does not recognize endpoint URL
    String kinesisUrl = producerProperties.getProperty(ConsumerConfigConstants.AWS_ENDPOINT);
    if (kinesisUrl != null) {
        URL url = new URL(kinesisUrl);
        producerProperties.put("KinesisEndpoint", url.getHost());
        producerProperties.put("KinesisPort", Integer.toString(url.getPort()));
        producerProperties.put("VerifyCertificate", "false");
    }

    FlinkKinesisProducer<KafkaEvent> producer = new FlinkKinesisProducer<>(
            new KafkaEventSchema(),
            producerProperties);
    producer.setDefaultStream(outputStream);
    producer.setDefaultPartition("fakePartition");

    DataStream<KafkaEvent> input = env
            .addSource(consumer)
            .keyBy("word")
            .map(new RollingAdditionMapper());

    input.addSink(producer);
    env.execute();
}
Example 18
Source Project: flink File: KinesisExample.java License: Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    String inputStream = parameterTool.getRequired("input-stream");
    String outputStream = parameterTool.getRequired("output-stream");

    FlinkKinesisConsumer<KafkaEvent> consumer = new FlinkKinesisConsumer<>(
            inputStream,
            new KafkaEventSchema(),
            parameterTool.getProperties());
    consumer.setPeriodicWatermarkAssigner(new CustomWatermarkExtractor());

    Properties producerProperties = new Properties(parameterTool.getProperties());
    // producer needs region even when URL is specified
    producerProperties.putIfAbsent(ConsumerConfigConstants.AWS_REGION, "us-east-1");
    // test driver does not deaggregate
    producerProperties.putIfAbsent("AggregationEnabled", String.valueOf(false));

    // KPL does not recognize endpoint URL
    String kinesisUrl = producerProperties.getProperty(ConsumerConfigConstants.AWS_ENDPOINT);
    if (kinesisUrl != null) {
        URL url = new URL(kinesisUrl);
        producerProperties.put("KinesisEndpoint", url.getHost());
        producerProperties.put("KinesisPort", Integer.toString(url.getPort()));
        producerProperties.put("VerifyCertificate", "false");
    }

    FlinkKinesisProducer<KafkaEvent> producer = new FlinkKinesisProducer<>(
            new KafkaEventSchema(),
            producerProperties);
    producer.setDefaultStream(outputStream);
    producer.setDefaultPartition("fakePartition");

    DataStream<KafkaEvent> input = env
            .addSource(consumer)
            .keyBy("word")
            .map(new RollingAdditionMapper());

    input.addSink(producer);
    env.execute();
}
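Examples 17 and 18 build producerProperties with the Properties(Properties defaults) constructor. Note that putIfAbsent, inherited from Hashtable, consults only the object's own table and never the defaults, while getProperty falls back to the defaults; a key present only in the defaults is therefore still inserted, and the inserted value then shadows the default. A minimal sketch (the key and values are ours, purely illustrative):

import java.util.Properties;

public class DefaultsShadowDemo {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("aws.region", "eu-west-1");

        Properties props = new Properties(defaults);          // defaults are consulted only by getProperty
        props.putIfAbsent("aws.region", "us-east-1");         // inserts anyway: putIfAbsent never sees the defaults
        System.out.println(props.getProperty("aws.region"));  // us-east-1 (the default is shadowed)
    }
}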
Example 19
Source Project: brooklin File: SimpleKafkaProducerFactory.java License: BSD 2-Clause "Simplified" License
static Properties addProducerDefaultProperties(Properties properties) {
    properties.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, DEFAULT_KEY_SERIALIZER);
    properties.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, DEFAULT_VAL_SERIALIZER);
    return properties;
}