org.apache.hadoop.hbase.util.Strings Java Examples

The following examples show how to use org.apache.hadoop.hbase.util.Strings. Each example notes the project, source file, and license it was taken from.
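Before working through the examples, it may help to see the three helpers they rely on most often in isolation. The sketch below is not taken from any of the projects listed; the class name StringsQuickStart is made up for illustration, and the output shown in the comments is inferred from how the examples below use the methods, so treat the exact separators as an assumption.

import org.apache.hadoop.hbase.util.Strings;

public class StringsQuickStart {
  public static void main(String[] args) {
    // appendKeyValue(..) appends a key/value pair to the builder and returns the
    // same StringBuilder, which is why the examples below can reassign or ignore
    // the return value interchangeably.
    StringBuilder sb = Strings.appendKeyValue(new StringBuilder(), "numberOfStores", 3);
    Strings.appendKeyValue(sb, "storeFileSizeMB", 128);
    System.out.println(sb); // assumed output: numberOfStores=3, storeFileSizeMB=128

    // padFront(..) left-pads a string with the given character up to the given
    // total length, as KeyValueTestUtil does for its fixed-width length columns.
    System.out.println(Strings.padFront("42", '0', 4)); // 0042

    // domainNamePointerToHostName(..) normalizes a reverse-DNS pointer name,
    // which typically carries a trailing dot, into a plain hostname.
    System.out.println(Strings.domainNamePointerToHostName("host1.example.com."));
    // host1.example.com
  }
}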
Example #1
Source File: ReplicationLoad.java    From hbase with Apache License 2.0
/**
 * Summarizes the source replication load entries.
 * @return a string containing sourceReplicationLoad information
 */
public String sourceToString() {
  StringBuilder sb = new StringBuilder();

  for (ClusterStatusProtos.ReplicationLoadSource rls :
      this.replicationLoadSourceEntries) {

    sb = Strings.appendKeyValue(sb, "\n           PeerID", rls.getPeerID());
    sb = Strings.appendKeyValue(sb, "AgeOfLastShippedOp", rls.getAgeOfLastShippedOp());
    sb = Strings.appendKeyValue(sb, "SizeOfLogQueue", rls.getSizeOfLogQueue());
    sb =
        Strings.appendKeyValue(sb, "TimestampsOfLastShippedOp",
            (new Date(rls.getTimeStampOfLastShippedOp()).toString()));
    sb = Strings.appendKeyValue(sb, "Replication Lag", rls.getReplicationLag());
  }

  return sb.toString();
}
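Because Strings.appendKeyValue returns the StringBuilder it is given, the repeated sb = ... reassignments above are a stylistic choice; each call already appends to the same builder, so an equivalent (hypothetical) form could simply be:

    Strings.appendKeyValue(sb, "\n           PeerID", rls.getPeerID());
    Strings.appendKeyValue(sb, "AgeOfLastShippedOp", rls.getAgeOfLastShippedOp());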
 
Example #2
Source File: TableInputFormatBase.java    From hbase with Apache License 2.0
String reverseDNS(InetAddress ipAddress) throws UnknownHostException {
  String hostName = this.reverseDNSCacheMap.get(ipAddress);
  if (hostName == null) {
    String ipAddressString = null;
    try {
      ipAddressString = DNS.reverseDns(ipAddress, null);
    } catch (Exception e) {
      // We can use InetAddress in case the jndi failed to pull up the reverse DNS entry from the
      // name service. Also, in case of ipv6, we need to use the InetAddress since resolving
      // reverse DNS using jndi doesn't work well with ipv6 addresses.
      ipAddressString = InetAddress.getByName(ipAddress.getHostAddress()).getHostName();
    }
    if (ipAddressString == null) throw new UnknownHostException("No host found for " + ipAddress);
    hostName = Strings.domainNamePointerToHostName(ipAddressString);
    this.reverseDNSCacheMap.put(ipAddress, hostName);
  }
  return hostName;
}
 
Example #3
Source File: KeyValueTestUtil.java    From hbase with Apache License 2.0
protected static String toStringWithPadding(final KeyValue kv, final int maxRowLength,
    int maxFamilyLength, int maxQualifierLength, int maxTimestampLength, boolean includeMeta) {
  String leadingLengths = "";
  String familyLength = kv.getFamilyLength() + " ";
  if (includeMeta) {
    leadingLengths += Strings.padFront(kv.getKeyLength() + "", '0', 4);
    leadingLengths += " ";
    leadingLengths += Strings.padFront(kv.getValueLength() + "", '0', 4);
    leadingLengths += " ";
    leadingLengths += Strings.padFront(kv.getRowLength() + "", '0', 2);
    leadingLengths += " ";
  }
  int spacesAfterRow = maxRowLength - getRowString(kv).length() + 2;
  int spacesAfterFamily = maxFamilyLength - getFamilyString(kv).length() + 2;
  int spacesAfterQualifier = maxQualifierLength - getQualifierString(kv).length() + 1;
  int spacesAfterTimestamp = maxTimestampLength
      - Long.valueOf(kv.getTimestamp()).toString().length() + 1;
  return leadingLengths + getRowString(kv) + StringUtils.repeat(' ', spacesAfterRow)
      + familyLength + getFamilyString(kv) + StringUtils.repeat(' ', spacesAfterFamily)
      + getQualifierString(kv) + StringUtils.repeat(' ', spacesAfterQualifier)
      + getTimestampString(kv) + StringUtils.repeat(' ', spacesAfterTimestamp)
      + getTypeString(kv) + " " + getValueString(kv);
}
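To make the padding concrete: with includeMeta set to true, a KeyValue whose key length is 23, value length is 7 and row length is 3 yields the prefix "0023 0007 03 ", because padFront left-pads each number with '0' to widths of 4, 4 and 2 characters respectively, with a single space appended after each.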
 
Example #4
Source File: HostnameSupplier.java    From DataLink with Apache License 2.0
@Override
public String get() {
    try {
        final String dnsInterface = hbaseConfig.get("hbase.regionserver.dns.interface", DEFAULT_DNS_INTERFACE);
        String hostname = Strings.domainNamePointerToHostName(DNS.getDefaultHost(dnsInterface));
        return hostname;
    } catch (UnknownHostException ukhe) {
        throw new RuntimeException(ukhe);
    }
}
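The two-step pattern shown here, DNS.getDefaultHost to resolve the name for a configured interface (and optionally nameserver) followed by Strings.domainNamePointerToHostName to normalize the result, recurs throughout the examples below (#8, #9, #10, #12 and #15); only the configuration keys supplying the interface and nameserver differ.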
 
Example #5
Source File: ReplicationLoad.java    From hbase with Apache License 2.0
/**
 * Summarizes the sink replication load.
 * @return a string containing sinkReplicationLoad information
 */
public String sinkToString() {
  if (this.replicationLoadSink == null) return null;

  StringBuilder sb = new StringBuilder();
  sb =
      Strings.appendKeyValue(sb, "AgeOfLastAppliedOp",
        this.replicationLoadSink.getAgeOfLastAppliedOp());
  sb =
      Strings.appendKeyValue(sb, "TimestampsOfLastAppliedOp",
        (new Date(this.replicationLoadSink.getTimeStampsOfLastAppliedOp()).toString()));

  return sb.toString();
}
 
Example #6
Source File: TestEncodedSeekers.java    From hbase with Apache License 2.0
private void doPuts(HRegion region) throws IOException{
  LoadTestKVGenerator dataGenerator = new LoadTestKVGenerator(MIN_VALUE_SIZE, MAX_VALUE_SIZE);
  for (int i = 0; i < NUM_ROWS; ++i) {
    byte[] key = Bytes.toBytes(LoadTestKVGenerator.md5PrefixedKey(i));
    for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
      Put put = new Put(key);
      put.setDurability(Durability.ASYNC_WAL);
      byte[] col = Bytes.toBytes(String.valueOf(j));
      byte[] value = dataGenerator.generateRandomSizeValue(key, col);
      if (includeTags) {
        Tag[] tag = new Tag[1];
        tag[0] = new ArrayBackedTag((byte) 1, "Visibility");
        KeyValue kv = new KeyValue(key, CF_BYTES, col, HConstants.LATEST_TIMESTAMP, value, tag);
        put.add(kv);
      } else {
        put.addColumn(CF_BYTES, col, value);
      }
      if (VERBOSE) {
        KeyValue kvPut = new KeyValue(key, CF_BYTES, col, value);
        System.err.println(Strings.padFront(i + "", ' ', 4) + " " + kvPut);
      }
      region.put(put);
    }
    if (i % NUM_ROWS_PER_FLUSH == 0) {
      region.flush(true);
    }
  }
}
 
Example #7
Source File: UserMetricsBuilder.java    From hbase with Apache License 2.0
@Override
public String toString() {
  StringBuilder sb = Strings
      .appendKeyValue(new StringBuilder(), "readRequestCount", this.getReadRequestCount());
  Strings.appendKeyValue(sb, "writeRequestCount", this.getWriteRequestCount());
  Strings.appendKeyValue(sb, "filteredReadRequestCount", this.getFilteredReadRequests());
  return sb.toString();
}
 
Example #8
Source File: RESTServer.java    From hbase with Apache License 2.0
private static Pair<FilterHolder, Class<? extends ServletContainer>> loginServerPrincipal(
  UserProvider userProvider, Configuration conf) throws Exception {
  Class<? extends ServletContainer> containerClass = ServletContainer.class;
  if (userProvider.isHadoopSecurityEnabled() && userProvider.isHBaseSecurityEnabled()) {
    String machineName = Strings.domainNamePointerToHostName(
      DNS.getDefaultHost(conf.get(REST_DNS_INTERFACE, "default"),
        conf.get(REST_DNS_NAMESERVER, "default")));
    String keytabFilename = conf.get(REST_KEYTAB_FILE);
    Preconditions.checkArgument(keytabFilename != null && !keytabFilename.isEmpty(),
      REST_KEYTAB_FILE + " should be set if security is enabled");
    String principalConfig = conf.get(REST_KERBEROS_PRINCIPAL);
    Preconditions.checkArgument(principalConfig != null && !principalConfig.isEmpty(),
      REST_KERBEROS_PRINCIPAL + " should be set if security is enabled");
    // Hook for unit tests, this will log out any other user and mess up tests.
    if (!conf.getBoolean(SKIP_LOGIN_KEY, false)) {
      userProvider.login(REST_KEYTAB_FILE, REST_KERBEROS_PRINCIPAL, machineName);
    }
    if (conf.get(REST_AUTHENTICATION_TYPE) != null) {
      containerClass = RESTServletContainer.class;
      FilterHolder authFilter = new FilterHolder();
      authFilter.setClassName(AuthFilter.class.getName());
      authFilter.setName("AuthenticationFilter");
      return new Pair<>(authFilter, containerClass);
    }
  }
  return new Pair<>(null, containerClass);
}
 
Example #9
Source File: AuthFilter.java    From hbase with Apache License 2.0
/**
 * Returns the configuration to be used by the authentication filter
 * to initialize the authentication handler.
 *
 * This filter retrieves all HBase configuration entries and passes those
 * starting with REST_PREFIX to the authentication handler. This makes it
 * possible to plug in different authentication handlers.
 */
@Override
protected Properties getConfiguration(
    String configPrefix, FilterConfig filterConfig) throws ServletException {
  Properties props = super.getConfiguration(configPrefix, filterConfig);
  //setting the cookie path to root '/' so it is used for all resources.
  props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");

  Configuration conf = null;
  // Dirty hack to get at the RESTServer's configuration. These should be pulled out
  // of the FilterConfig.
  if (RESTServer.conf != null) {
    conf = RESTServer.conf;
  } else {
    conf = HBaseConfiguration.create();
  }
  for (Map.Entry<String, String> entry : conf) {
    String name = entry.getKey();
    if (name.startsWith(REST_PREFIX)) {
      String value = entry.getValue();
      if (name.equals(REST_AUTHENTICATION_PRINCIPAL)) {
        try {
          String machineName = Strings.domainNamePointerToHostName(
            DNS.getDefaultHost(conf.get(REST_DNS_INTERFACE, "default"),
              conf.get(REST_DNS_NAMESERVER, "default")));
          value = SecurityUtil.getServerPrincipal(value, machineName);
        } catch (IOException ie) {
          throw new ServletException("Failed to retrieve server principal", ie);
        }
      }
      if (LOG.isTraceEnabled()) {
        LOG.trace("Setting property " + name + "=" + value);
      }
      name = name.substring(REST_PREFIX_LEN);
      props.setProperty(name, value);
    }
  }
  return props;
}
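The hostname is resolved first because SecurityUtil.getServerPrincipal substitutes it for the _HOST placeholder conventionally used in Kerberos principal configuration values (for example HTTP/_HOST@REALM), producing the concrete server principal that is then passed on to the authentication handler.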
 
Example #10
Source File: Main.java    From hbase-indexer with Apache License 2.0
/**
 * @param conf the configuration object containing the hbase-indexer configuration, as well
 *             as the hbase/hadoop settings. Typically created using {@link HBaseIndexerConfiguration}.
 */
public void startServices(Configuration conf) throws Exception {
    String hostname = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
            conf.get("hbase.regionserver.dns.interface", "default"),
            conf.get("hbase.regionserver.dns.nameserver", "default")));

    log.debug("Using hostname " + hostname);

    String zkConnectString = conf.get(ConfKeys.ZK_CONNECT_STRING);
    int zkSessionTimeout = HBaseIndexerConfiguration.getSessionTimeout(conf);
    zk = new StateWatchingZooKeeper(zkConnectString, zkSessionTimeout);

    tablePool = ConnectionFactory.createConnection(conf);

    String zkRoot = conf.get(ConfKeys.ZK_ROOT_NODE);

    indexerModel = new IndexerModelImpl(zk, zkRoot);

    sepModel = new SepModelImpl(zk, conf);

    indexerMaster = new IndexerMaster(zk, indexerModel, conf, conf, zkConnectString,
            sepModel);
    indexerMaster.start();

    IndexerRegistry indexerRegistry = new IndexerRegistry();
    IndexerProcessRegistry indexerProcessRegistry = new IndexerProcessRegistryImpl(zk, conf);
    indexerSupervisor = new IndexerSupervisor(indexerModel, zk, hostname, indexerRegistry,
            indexerProcessRegistry, tablePool, conf);

    indexerSupervisor.init();
    startHttpServer();

}
 
Example #11
Source File: HBaseInputFormatGranular.java    From SpyGlass with Apache License 2.0
private String reverseDNS(InetAddress ipAddress) throws NamingException {
	String hostName = this.reverseDNSCacheMap.get(ipAddress);
	if (hostName == null) {
		hostName = Strings.domainNamePointerToHostName(DNS.reverseDns(
				ipAddress, this.nameServer));
		this.reverseDNSCacheMap.put(ipAddress, hostName);
	}
	return hostName;
}
 
Example #12
Source File: ThriftServer.java    From hbase with Apache License 2.0
protected void setupParamters() throws IOException {
  // login the server principal (if using secure Hadoop)
  UserProvider userProvider = UserProvider.instantiate(conf);
  securityEnabled = userProvider.isHadoopSecurityEnabled()
      && userProvider.isHBaseSecurityEnabled();
  if (securityEnabled) {
    host = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
        conf.get(THRIFT_DNS_INTERFACE_KEY, "default"),
        conf.get(THRIFT_DNS_NAMESERVER_KEY, "default")));
    userProvider.login(THRIFT_KEYTAB_FILE_KEY, THRIFT_KERBEROS_PRINCIPAL_KEY, host);

    // Setup the SPNEGO user for HTTP if configured
    String spnegoPrincipal = getSpengoPrincipal(conf, host);
    String spnegoKeytab = getSpnegoKeytab(conf);
    UserGroupInformation.setConfiguration(conf);
    // login the SPNEGO principal using UGI to avoid polluting the login user
    this.httpUGI = UserGroupInformation.loginUserFromKeytabAndReturnUGI(spnegoPrincipal,
      spnegoKeytab);
  }
  this.serviceUGI = userProvider.getCurrent().getUGI();
  if (httpUGI == null) {
    this.httpUGI = serviceUGI;
  }

  this.listenPort = conf.getInt(PORT_CONF_KEY, DEFAULT_LISTEN_PORT);
  this.metrics = createThriftMetrics(conf);
  this.pauseMonitor = new JvmPauseMonitor(conf, this.metrics.getSource());
  this.hbaseServiceHandler = createHandler(conf, userProvider);
  this.hbaseServiceHandler.initMetrics(metrics);
  this.processor = createProcessor();

  httpEnabled = conf.getBoolean(USE_HTTP_CONF_KEY, false);
  doAsEnabled = conf.getBoolean(THRIFT_SUPPORT_PROXYUSER_KEY, false);
  if (doAsEnabled && !httpEnabled) {
    LOG.warn("Fail to enable the doAs feature. " + USE_HTTP_CONF_KEY + " is not configured");
  }

  String strQop = conf.get(THRIFT_QOP_KEY);
  if (strQop != null) {
    this.qop = SaslUtil.getQop(strQop);
  }
  if (qop != null) {
    if (qop != SaslUtil.QualityOfProtection.AUTHENTICATION &&
        qop != SaslUtil.QualityOfProtection.INTEGRITY &&
        qop != SaslUtil.QualityOfProtection.PRIVACY) {
      throw new IOException(String.format("Invalid %s: It must be one of %s, %s, or %s.",
          THRIFT_QOP_KEY,
          SaslUtil.QualityOfProtection.AUTHENTICATION.name(),
          SaslUtil.QualityOfProtection.INTEGRITY.name(),
          SaslUtil.QualityOfProtection.PRIVACY.name()));
    }
    checkHttpSecurity(qop, conf);
    if (!securityEnabled) {
      throw new IOException("Thrift server must run in secure mode to support authentication");
    }
  }
  registerFilters(conf);
  pauseMonitor.start();
}
 
Example #13
Source File: ServerMetricsBuilder.java    From hbase with Apache License 2.0
@Override
public String toString() {
  int storeCount = 0;
  int storeFileCount = 0;
  int storeRefCount = 0;
  int maxCompactedStoreFileRefCount = 0;
  long uncompressedStoreFileSizeMB = 0;
  long storeFileSizeMB = 0;
  long memStoreSizeMB = 0;
  long storefileIndexSizeKB = 0;
  long rootLevelIndexSizeKB = 0;
  long readRequestsCount = 0;
  long cpRequestsCount = 0;
  long writeRequestsCount = 0;
  long filteredReadRequestsCount = 0;
  long bloomFilterSizeMB = 0;
  long compactingCellCount = 0;
  long compactedCellCount = 0;
  for (RegionMetrics r : getRegionMetrics().values()) {
    storeCount += r.getStoreCount();
    storeFileCount += r.getStoreFileCount();
    storeRefCount += r.getStoreRefCount();
    int currentMaxCompactedStoreFileRefCount = r.getMaxCompactedStoreFileRefCount();
    maxCompactedStoreFileRefCount = Math.max(maxCompactedStoreFileRefCount,
      currentMaxCompactedStoreFileRefCount);
    uncompressedStoreFileSizeMB += r.getUncompressedStoreFileSize().get(Size.Unit.MEGABYTE);
    storeFileSizeMB += r.getStoreFileSize().get(Size.Unit.MEGABYTE);
    memStoreSizeMB += r.getMemStoreSize().get(Size.Unit.MEGABYTE);
    storefileIndexSizeKB += r.getStoreFileUncompressedDataIndexSize().get(Size.Unit.KILOBYTE);
    readRequestsCount += r.getReadRequestCount();
    cpRequestsCount += r.getCpRequestCount();
    writeRequestsCount += r.getWriteRequestCount();
    filteredReadRequestsCount += r.getFilteredReadRequestCount();
    rootLevelIndexSizeKB += r.getStoreFileRootLevelIndexSize().get(Size.Unit.KILOBYTE);
    bloomFilterSizeMB += r.getBloomFilterSize().get(Size.Unit.MEGABYTE);
    compactedCellCount += r.getCompactedCellCount();
    compactingCellCount += r.getCompactingCellCount();
  }
  StringBuilder sb = Strings.appendKeyValue(new StringBuilder(), "requestsPerSecond",
        Double.valueOf(getRequestCountPerSecond()));
  Strings.appendKeyValue(sb, "numberOfOnlineRegions",
      Integer.valueOf(getRegionMetrics().size()));
  Strings.appendKeyValue(sb, "usedHeapMB", getUsedHeapSize());
  Strings.appendKeyValue(sb, "maxHeapMB", getMaxHeapSize());
  Strings.appendKeyValue(sb, "numberOfStores", storeCount);
  Strings.appendKeyValue(sb, "numberOfStorefiles", storeFileCount);
  Strings.appendKeyValue(sb, "storeRefCount", storeRefCount);
  Strings.appendKeyValue(sb, "maxCompactedStoreFileRefCount",
    maxCompactedStoreFileRefCount);
  Strings.appendKeyValue(sb, "storefileUncompressedSizeMB", uncompressedStoreFileSizeMB);
  Strings.appendKeyValue(sb, "storefileSizeMB", storeFileSizeMB);
  if (uncompressedStoreFileSizeMB != 0) {
    Strings.appendKeyValue(sb, "compressionRatio", String.format("%.4f",
        (float) storeFileSizeMB / (float) uncompressedStoreFileSizeMB));
  }
  Strings.appendKeyValue(sb, "memstoreSizeMB", memStoreSizeMB);
  Strings.appendKeyValue(sb, "readRequestsCount", readRequestsCount);
  Strings.appendKeyValue(sb, "cpRequestsCount", cpRequestsCount);
  Strings.appendKeyValue(sb, "filteredReadRequestsCount", filteredReadRequestsCount);
  Strings.appendKeyValue(sb, "writeRequestsCount", writeRequestsCount);
  Strings.appendKeyValue(sb, "rootIndexSizeKB", rootLevelIndexSizeKB);
  Strings.appendKeyValue(sb, "totalStaticIndexSizeKB", storefileIndexSizeKB);
  Strings.appendKeyValue(sb, "totalStaticBloomSizeKB", bloomFilterSizeMB);
  Strings.appendKeyValue(sb, "totalCompactingKVs", compactingCellCount);
  Strings.appendKeyValue(sb, "currentCompactedKVs", compactedCellCount);
  float compactionProgressPct = Float.NaN;
  if (compactingCellCount > 0) {
    compactionProgressPct =
        Float.valueOf((float) compactedCellCount / compactingCellCount);
  }
  Strings.appendKeyValue(sb, "compactionProgressPct", compactionProgressPct);
  Strings.appendKeyValue(sb, "coprocessors", getCoprocessorNames());
  return sb.toString();
}
 
Example #14
Source File: RegionMetricsBuilder.java    From hbase with Apache License 2.0
@Override
public String toString() {
  StringBuilder sb = Strings.appendKeyValue(new StringBuilder(), "storeCount",
      this.getStoreCount());
  Strings.appendKeyValue(sb, "storeFileCount",
      this.getStoreFileCount());
  Strings.appendKeyValue(sb, "storeRefCount",
    this.getStoreRefCount());
  Strings.appendKeyValue(sb, "maxCompactedStoreFileRefCount",
    this.getMaxCompactedStoreFileRefCount());
  Strings.appendKeyValue(sb, "uncompressedStoreFileSize",
      this.getUncompressedStoreFileSize());
  Strings.appendKeyValue(sb, "lastMajorCompactionTimestamp",
      this.getLastMajorCompactionTimestamp());
  Strings.appendKeyValue(sb, "storeFileSize",
      this.getStoreFileSize());
  if (this.getUncompressedStoreFileSize().get() != 0) {
    Strings.appendKeyValue(sb, "compressionRatio",
        String.format("%.4f",
            (float) this.getStoreFileSize().get(Size.Unit.MEGABYTE) /
            (float) this.getUncompressedStoreFileSize().get(Size.Unit.MEGABYTE)));
  }
  Strings.appendKeyValue(sb, "memStoreSize",
      this.getMemStoreSize());
  Strings.appendKeyValue(sb, "readRequestCount",
      this.getReadRequestCount());
  Strings.appendKeyValue(sb, "cpRequestCount",
      this.getCpRequestCount());
  Strings.appendKeyValue(sb, "writeRequestCount",
      this.getWriteRequestCount());
  Strings.appendKeyValue(sb, "rootLevelIndexSize",
      this.getStoreFileRootLevelIndexSize());
  Strings.appendKeyValue(sb, "uncompressedDataIndexSize",
      this.getStoreFileUncompressedDataIndexSize());
  Strings.appendKeyValue(sb, "bloomFilterSize",
      this.getBloomFilterSize());
  Strings.appendKeyValue(sb, "compactingCellCount",
      this.getCompactingCellCount());
  Strings.appendKeyValue(sb, "compactedCellCount",
      this.getCompactedCellCount());
  float compactionProgressPct = Float.NaN;
  if (this.getCompactingCellCount() > 0) {
    compactionProgressPct = ((float) this.getCompactedCellCount() /
        (float) this.getCompactingCellCount());
  }
  Strings.appendKeyValue(sb, "compactionProgressPct",
      compactionProgressPct);
  Strings.appendKeyValue(sb, "completedSequenceId",
      this.getCompletedSequenceId());
  Strings.appendKeyValue(sb, "dataLocality",
      this.getDataLocality());
  Strings.appendKeyValue(sb, "dataLocalityForSsd",
      this.getDataLocalityForSsd());
  return sb.toString();
}
 
Example #15
Source File: HQuorumPeer.java    From hbase with Apache License 2.0
static void writeMyID(Properties properties) throws IOException {
  long myId = -1;

  Configuration conf = HBaseConfiguration.create();
  String myAddress = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
      conf.get("hbase.zookeeper.dns.interface","default"),
      conf.get("hbase.zookeeper.dns.nameserver","default")));

  List<String> ips = new ArrayList<>();

  // Add what could be the best (configured) match
  ips.add(myAddress.contains(".") ?
      myAddress :
      StringUtils.simpleHostname(myAddress));

  // For all nics get all hostnames and IPs
  Enumeration<?> nics = NetworkInterface.getNetworkInterfaces();
  while(nics.hasMoreElements()) {
    Enumeration<?> rawAdrs =
        ((NetworkInterface)nics.nextElement()).getInetAddresses();
    while(rawAdrs.hasMoreElements()) {
      InetAddress inet = (InetAddress) rawAdrs.nextElement();
      ips.add(StringUtils.simpleHostname(inet.getHostName()));
      ips.add(inet.getHostAddress());
    }
  }

  for (Entry<Object, Object> entry : properties.entrySet()) {
    String key = entry.getKey().toString().trim();
    String value = entry.getValue().toString().trim();
    if (key.startsWith("server.")) {
      int dot = key.indexOf('.');
      long id = Long.parseLong(key.substring(dot + 1));
      String[] parts = value.split(":");
      String address = parts[0];
      if (addressIsLocalHost(address) || ips.contains(address)) {
        myId = id;
        break;
      }
    }
  }

  // Set the max session timeout from the provided client-side timeout
  properties.setProperty("maxSessionTimeout", conf.get(HConstants.ZK_SESSION_TIMEOUT,
          Integer.toString(HConstants.DEFAULT_ZK_SESSION_TIMEOUT)));

  if (myId == -1) {
    throw new IOException("Could not find my address: " + myAddress +
                          " in list of ZooKeeper quorum servers");
  }

  String dataDirStr = properties.get("dataDir").toString().trim();
  File dataDir = new File(dataDirStr);
  if (!dataDir.isDirectory()) {
    if (!dataDir.mkdirs()) {
      throw new IOException("Unable to create data dir " + dataDir);
    }
  }

  File myIdFile = new File(dataDir, "myid");
  PrintWriter w = new PrintWriter(myIdFile, StandardCharsets.UTF_8.name());
  w.println(myId);
  w.close();
}
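The file written here is ZooKeeper's standard myid file: each quorum member reads it from its dataDir at startup to discover which server.N entry in the configuration refers to itself, which is why the method first has to match one of the local hostnames or IP addresses against the server.* properties.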