Java Code Examples for org.apache.hadoop.conf.Configuration#setBooleanIfUnset()
The following examples show how to use org.apache.hadoop.conf.Configuration#setBooleanIfUnset(). The originating project, source file, and license are noted above each example.
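As a quick orientation, Configuration#setBooleanIfUnset(name, value) writes the boolean property only if it currently has no value; if the property was already set (by a resource file or an earlier set() call), the call is a no-op. A minimal, self-contained sketch (the property name demo.flag is made up for illustration):

import org.apache.hadoop.conf.Configuration;

public class SetBooleanIfUnsetDemo {
  public static void main(String[] args) {
    // start from an empty Configuration (skip loading default resources)
    Configuration conf = new Configuration(false);

    // key is absent, so the value is written
    conf.setBooleanIfUnset("demo.flag", true);
    System.out.println(conf.getBoolean("demo.flag", false)); // prints true

    // key is now present, so this call is a no-op
    conf.setBooleanIfUnset("demo.flag", false);
    System.out.println(conf.getBoolean("demo.flag", false)); // still prints true
  }
}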
Example 1
Source File: Constraints.java From hbase with Apache License 2.0
/**
 * Setup the configuration for a constraint as to whether it is enabled and
 * its priority
 *
 * @param conf
 *          on which to base the new configuration
 * @param enabled
 *          <tt>true</tt> if it should be run
 * @param priority
 *          relative to other constraints
 * @return a new configuration, storable in the {@link HTableDescriptor}
 */
private static Configuration configure(Configuration conf, boolean enabled,
    long priority) {
  // create the configuration to actually be stored
  // clone if possible, but otherwise just create an empty configuration
  Configuration toWrite =
      conf == null ? new Configuration() : new Configuration(conf);
  // update internal properties
  toWrite.setBooleanIfUnset(ENABLED_KEY, enabled);
  // set the priority only if it is still unset (there is no long-valued
  // "if unset" setter, so guard it manually with a sentinel)
  if (toWrite.getLong(PRIORITY_KEY, UNSET_PRIORITY) == UNSET_PRIORITY) {
    toWrite.setLong(PRIORITY_KEY, priority);
  }
  return toWrite;
}
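Worth noting: Configuration offers setBooleanIfUnset() and a string-valued setIfUnset(), but no long-valued variant, which is why the example guards PRIORITY_KEY with a sentinel comparison. A hypothetical helper expressing the same intent (a sketch, not part of the HBase or Hadoop API):

// Hypothetical helper mirroring setBooleanIfUnset() for long values.
// Uses the same "is the raw value absent?" test as Configuration.setIfUnset().
private static void setLongIfUnset(Configuration conf, String name, long value) {
  if (conf.get(name) == null) {
    conf.setLong(name, value);
  }
}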
Example 2
Source File: HdfsConfigurationInitializer.java From presto with Apache License 2.0
public void initializeConfiguration(Configuration config) {
  copy(resourcesConfiguration, config);

  // this is to prevent dfs client from doing reverse DNS lookups to determine
  // whether nodes are rack local
  config.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, NoOpDNSToSwitchMapping.class, DNSToSwitchMapping.class);

  if (socksProxy != null) {
    config.setClass(HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY, SocksSocketFactory.class, SocketFactory.class);
    config.set(HADOOP_SOCKS_SERVER_KEY, socksProxy.toString());
  }

  if (domainSocketPath != null) {
    config.setStrings(DFS_DOMAIN_SOCKET_PATH_KEY, domainSocketPath);
  }

  // only enable short circuit reads if domain socket path is properly configured
  if (!config.get(DFS_DOMAIN_SOCKET_PATH_KEY, "").trim().isEmpty()) {
    config.setBooleanIfUnset(HdfsClientConfigKeys.Read.ShortCircuit.KEY, true);
  }

  config.setInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, toIntExact(dfsTimeout.toMillis()));
  config.setInt(IPC_PING_INTERVAL_KEY, toIntExact(ipcPingInterval.toMillis()));
  config.setInt(IPC_CLIENT_CONNECT_TIMEOUT_KEY, toIntExact(dfsConnectTimeout.toMillis()));
  config.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, dfsConnectMaxRetries);
  config.setInt(DFS_CLIENT_KEY_PROVIDER_CACHE_EXPIRY_MS, dfsKeyProviderCacheTtlMillis);

  if (wireEncryptionEnabled) {
    config.set(HADOOP_RPC_PROTECTION, "privacy");
    config.setBoolean("dfs.encrypt.data.transfer", true);
  }

  config.setInt("fs.cache.max-size", fileSystemMaxCacheSize);

  configurationInitializers.forEach(configurationInitializer -> configurationInitializer.initializeConfiguration(config));
}
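The setBooleanIfUnset() call matters here because resourcesConfiguration is copied into config at the top of the method: if the user explicitly disabled short-circuit reads there, the default applied afterwards must not override it. A minimal sketch of that precedence, assuming HdfsClientConfigKeys.Read.ShortCircuit.KEY resolves to the literal key "dfs.client.read.shortcircuit":

Configuration config = new Configuration(false);

// an explicit setting copied in from the user's resource files...
config.set("dfs.client.read.shortcircuit", "false");

// ...is preserved, because the key is no longer unset
config.setBooleanIfUnset("dfs.client.read.shortcircuit", true);
System.out.println(config.getBoolean("dfs.client.read.shortcircuit", false)); // prints false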
Example 3
Source File: DBScanJobRunner.java From geowave with Apache License 2.0
@Override
public void configure(final Job job) throws Exception {
  super.configure(job);
  job.setMapperClass(NNMapReduce.NNMapper.class);
  job.setReducerClass(DBScanMapReduce.DBScanMapHullReducer.class);
  job.setMapOutputKeyClass(PartitionDataWritable.class);
  job.setMapOutputValueClass(AdapterWithObjectWritable.class);
  job.setOutputKeyClass(GeoWaveInputKey.class);
  job.setOutputValueClass(ObjectWritable.class);
  job.setSpeculativeExecution(false);

  final Configuration conf = job.getConfiguration();
  conf.set("mapreduce.map.java.opts", "-Xmx" + memInMB + "m");
  conf.set("mapreduce.reduce.java.opts", "-Xmx" + memInMB + "m");
  conf.setLong("mapred.task.timeout", 2000000);
  conf.setInt("mapreduce.task.io.sort.mb", 250);
  job.getConfiguration().setBoolean("mapreduce.reduce.speculative", false);

  // pick the highest-ranked compression codec that is usable in this context
  Class<? extends CompressionCodec> bestCodecClass =
      org.apache.hadoop.io.compress.DefaultCodec.class;
  int rank = 0;
  for (final Class<? extends CompressionCodec> codecClass :
      CompressionCodecFactory.getCodecClasses(conf)) {
    int r = 1;
    for (final String codecs : CodecsRank) {
      if (codecClass.getName().contains(codecs)) {
        break;
      }
      r++;
    }
    if ((rank < r) && (r <= CodecsRank.length)) {
      try {
        final CompressionCodec codec = codecClass.newInstance();
        if (Configurable.class.isAssignableFrom(codecClass)) {
          ((Configurable) codec).setConf(conf);
        }
        // throws an exception if not configurable in this context
        CodecPool.getCompressor(codec);
        bestCodecClass = codecClass;
        rank = r;
      } catch (final Throwable ex) {
        // occurs when the codec (e.g. its native library) is not installed
        LOGGER.info("Not configurable in this context", ex);
      }
    }
  }
  LOGGER.warn("Compression with " + bestCodecClass.toString());
  conf.setClass("mapreduce.map.output.compress.codec", bestCodecClass, CompressionCodec.class);
  conf.setBoolean("mapreduce.map.output.compress", true);
  conf.setBooleanIfUnset("first.iteration", firstIteration);
}
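Note the contrast in the closing lines: map-output compression is forced on unconditionally with setBoolean(), while first.iteration is only a default. Presumably a value written into the configuration before configure() runs (for instance by an enclosing runner on a later DBSCAN iteration) should win. A small sketch of the difference, reusing the property names from the example:

Configuration conf = new Configuration(false);
conf.setBoolean("mapreduce.map.output.compress", false);
conf.setBoolean("first.iteration", false);

conf.setBoolean("mapreduce.map.output.compress", true); // unconditional: overwrites
conf.setBooleanIfUnset("first.iteration", true);        // conditional: key already set, no-op

System.out.println(conf.getBoolean("mapreduce.map.output.compress", false)); // prints true
System.out.println(conf.getBoolean("first.iteration", true));                // prints false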