Java Code Examples for org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider

The following examples show how to use org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider. These examples are extracted from open source projects. You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source Project: hadoop   Source File: TestDFSClientFailover.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verify that a helpful error message is produced when a failover proxy
 * provider is configured for a logical URI but no NameNode addresses are
 * configured for that URI.
 */
@Test
public void testFailureWithMisconfiguredHaNNs() throws Exception {
  final String logicalHost = "misconfigured-ha-uri";
  Configuration config = new Configuration();
  // Register the provider class only; deliberately omit any NN addresses.
  config.set(DFS_CLIENT_FAILOVER_PROXY_PROVIDER_KEY_PREFIX + "." + logicalHost,
      ConfiguredFailoverProxyProvider.class.getName());

  URI badUri = new URI("hdfs://" + logicalHost + "/test");
  try {
    // Any FS operation should fail while resolving the proxy provider.
    FileSystem.get(badUri, config).exists(new Path("/test"));
    fail("Successfully got proxy provider for misconfigured FS");
  } catch (IOException e) {
    LOG.info("got expected exception", e);
    // The message must point the user at the missing address configuration.
    assertTrue("expected exception did not contain helpful message",
        StringUtils.stringifyException(e).contains(
            "Could not find any configured addresses for URI " + badUri));
  }
}
 
Example 2
Source Project: hadoop   Source File: MiniQJMHACluster.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Populate an HA configuration for the mini QJM cluster: shared edits on
 * the given journal URI, two NameNodes on loopback ports derived from
 * {@code basePort}, and the configured failover proxy provider for the
 * nameservice.
 *
 * @param journalURI location of the shared JournalNode edits directory
 * @param conf configuration to populate (mutated in place)
 * @return the same configuration instance, fully populated
 */
private Configuration initHAConf(URI journalURI, Configuration conf) {
  final String nn1RpcAddr = "127.0.0.1:" + basePort;
  final String nn2RpcAddr = "127.0.0.1:" + (basePort + 2);

  conf.set(DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY,
      journalURI.toString());
  conf.set(DFSConfigKeys.DFS_NAMESERVICES, NAMESERVICE);
  conf.set(DFSUtil.addKeySuffixes(DFS_HA_NAMENODES_KEY_PREFIX, NAMESERVICE),
      NN1 + "," + NN2);
  conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_RPC_ADDRESS_KEY,
      NAMESERVICE, NN1), nn1RpcAddr);
  conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_RPC_ADDRESS_KEY,
      NAMESERVICE, NN2), nn2RpcAddr);
  // Clients resolve the logical nameservice through the failover provider.
  conf.set(DFS_CLIENT_FAILOVER_PROXY_PROVIDER_KEY_PREFIX + "." + NAMESERVICE,
      ConfiguredFailoverProxyProvider.class.getName());
  conf.set("fs.defaultFS", "hdfs://" + NAMESERVICE);

  return conf;
}
 
Example 3
Source Project: big-c   Source File: TestDFSClientFailover.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verify that a helpful error message is produced when a failover proxy
 * provider is configured for a logical URI but no NameNode addresses are
 * configured for that URI.
 */
@Test
public void testFailureWithMisconfiguredHaNNs() throws Exception {
  final String logicalName = "misconfigured-ha-uri";
  Configuration haConf = new Configuration();
  // Only the provider class is set; no NN addresses exist for this URI.
  haConf.set(DFS_CLIENT_FAILOVER_PROXY_PROVIDER_KEY_PREFIX + "." + logicalName,
      ConfiguredFailoverProxyProvider.class.getName());

  URI misconfiguredUri = new URI("hdfs://" + logicalName + "/test");
  try {
    // Resolving the proxy provider for any operation should throw.
    FileSystem.get(misconfiguredUri, haConf).exists(new Path("/test"));
    fail("Successfully got proxy provider for misconfigured FS");
  } catch (IOException expected) {
    LOG.info("got expected exception", expected);
    // Assert the message actually helps diagnose the misconfiguration.
    assertTrue("expected exception did not contain helpful message",
        StringUtils.stringifyException(expected).contains(
            "Could not find any configured addresses for URI "
                + misconfiguredUri));
  }
}
 
Example 4
Source Project: big-c   Source File: MiniQJMHACluster.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Fill in an HA configuration for the mini QJM cluster: the shared edits
 * directory, nameservice membership, per-NameNode RPC addresses on
 * loopback, the failover proxy provider, and the default filesystem URI.
 *
 * @param journalURI location of the shared JournalNode edits directory
 * @param conf configuration to populate (mutated in place)
 * @return the populated configuration
 */
private Configuration initHAConf(URI journalURI, Configuration conf) {
  conf.set(DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY,
      journalURI.toString());

  // Two NameNodes on consecutive even loopback ports starting at basePort.
  final String firstAddr = "127.0.0.1:" + basePort;
  final String secondAddr = "127.0.0.1:" + (basePort + 2);
  conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_RPC_ADDRESS_KEY,
      NAMESERVICE, NN1), firstAddr);
  conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_RPC_ADDRESS_KEY,
      NAMESERVICE, NN2), secondAddr);

  conf.set(DFSConfigKeys.DFS_NAMESERVICES, NAMESERVICE);
  conf.set(DFSUtil.addKeySuffixes(DFS_HA_NAMENODES_KEY_PREFIX, NAMESERVICE),
      NN1 + "," + NN2);
  conf.set(DFS_CLIENT_FAILOVER_PROXY_PROVIDER_KEY_PREFIX + "." + NAMESERVICE,
      ConfiguredFailoverProxyProvider.class.getName());
  conf.set("fs.defaultFS", "hdfs://" + NAMESERVICE);

  return conf;
}
 
Example 5
Source Project: hadoop   Source File: TestDFSUtil.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Build a WebHDFS HA configuration for nameservice "ns1" with two
 * NameNodes at the supplied HTTP addresses, registering the failover
 * proxy provider under the given logical host name.
 *
 * @param logicalHostName logical host clients use in webhdfs URIs
 * @param nnaddr1 HTTP address of nn1
 * @param nnaddr2 HTTP address of nn2
 * @return a new HdfsConfiguration describing the HA pair
 */
private static Configuration createWebHDFSHAConfiguration(String logicalHostName, String nnaddr1, String nnaddr2) {
  HdfsConfiguration conf = new HdfsConfiguration();

  conf.set(DFS_NAMESERVICES, "ns1");
  conf.set(DFSUtil.addKeySuffixes(DFS_HA_NAMENODES_KEY_PREFIX, "ns1"),
      "nn1,nn2");
  // WebHDFS resolves NameNodes via their HTTP addresses, not RPC.
  conf.set(
      DFSUtil.addKeySuffixes(DFS_NAMENODE_HTTP_ADDRESS_KEY, "ns1", "nn1"),
      nnaddr1);
  conf.set(
      DFSUtil.addKeySuffixes(DFS_NAMENODE_HTTP_ADDRESS_KEY, "ns1", "nn2"),
      nnaddr2);
  conf.set(
      DFS_CLIENT_FAILOVER_PROXY_PROVIDER_KEY_PREFIX + "." + logicalHostName,
      ConfiguredFailoverProxyProvider.class.getName());
  return conf;
}
 
Example 6
/**
 * Point the client at a two-NameNode HA nameservice ("ns1") by writing an
 * hdfs-site-style XML configuration file that lists both NameNode RPC
 * addresses and registers ConfiguredFailoverProxyProvider for ns1.
 *
 * @param nn1port RPC port of the first NameNode (nn1)
 * @param nn2port RPC port of the second NameNode (nn2)
 */
private void initClientHAConf(int nn1port, int nn2port) throws Exception {
  // NOTE(review): hsf looks like a store/filesystem factory configured
  // elsewhere in this class — confirm its type and how it consumes the file.
  hsf.setHomeDir("test-case");
  hsf.setNameNodeURL("hdfs://ns1");
  File confFile = new File(getName());
  // Inline XML config; the trailing-space padding inside the literal is
  // cosmetic and ignored by the XML parser.
  String conf = "<configuration>\n             "
      + "  <property>\n                                    "
      + "    <name>dfs.nameservices</name>\n               "
      + "    <value>ns1</value>\n                          "
      + "  </property>\n                                   "
      + "  <property>\n                                    "
      + "    <name>dfs.ha.namenodes.ns1</name>\n           "
      + "    <value>nn1,nn2</value>\n                      "
      + "  </property>\n                                   "
      + "  <property>\n                                    "
      + "    <name>dfs.namenode.rpc-address.ns1.nn1</name>\n"
      + "    <value>hdfs://127.0.0.1:" + nn1port + "</value>\n"
      + "  </property>\n                                   "
      + "  <property>\n                                    "
      + "    <name>dfs.namenode.rpc-address.ns1.nn2</name>\n"
      + "    <value>hdfs://127.0.0.1:" + nn2port + "</value>\n"
      + "  </property>\n                                   "
      + "  <property>\n                                    "
      + "    <name>dfs.client.failover.proxy.provider.ns1</name>\n"
      + "    <value>" + ConfiguredFailoverProxyProvider.class.getName() + "</value>\n"
      + "  </property>\n                                   "
      + "</configuration>";
  // setConfigFile presumably writes the XML to confFile and registers it
  // with hsf — TODO confirm against its definition elsewhere in this file.
  setConfigFile(hsf, confFile, conf);
}
 
Example 7
Source Project: big-c   Source File: TestDFSUtil.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Create a WebHDFS HA configuration: nameservice "ns1" backed by NameNodes
 * nn1/nn2 at the given HTTP addresses, with the failover proxy provider
 * bound to the supplied logical host name.
 *
 * @param logicalHostName logical host clients use in webhdfs URIs
 * @param nnaddr1 HTTP address of nn1
 * @param nnaddr2 HTTP address of nn2
 * @return a new HdfsConfiguration describing the HA pair
 */
private static Configuration createWebHDFSHAConfiguration(String logicalHostName, String nnaddr1, String nnaddr2) {
  final String nameservice = "ns1";
  HdfsConfiguration haConf = new HdfsConfiguration();

  haConf.set(DFS_NAMESERVICES, nameservice);
  haConf.set(DFSUtil.addKeySuffixes(DFS_HA_NAMENODES_KEY_PREFIX, nameservice),
      "nn1,nn2");
  // WebHDFS uses the NameNodes' HTTP endpoints rather than RPC.
  haConf.set(DFSUtil.addKeySuffixes(
      DFS_NAMENODE_HTTP_ADDRESS_KEY, nameservice, "nn1"), nnaddr1);
  haConf.set(DFSUtil.addKeySuffixes(
      DFS_NAMENODE_HTTP_ADDRESS_KEY, nameservice, "nn2"), nnaddr2);
  haConf.set(
      DFS_CLIENT_FAILOVER_PROXY_PROVIDER_KEY_PREFIX + "." + logicalHostName,
      ConfiguredFailoverProxyProvider.class.getName());
  return haConf;
}
 
Example 8
/**
 * Configure the client for a two-NameNode HA nameservice ("ns1") by
 * generating an hdfs-site-style XML file with both NameNode RPC addresses
 * and the ConfiguredFailoverProxyProvider entry for ns1.
 *
 * @param nn1port RPC port of the first NameNode (nn1)
 * @param nn2port RPC port of the second NameNode (nn2)
 */
private void initClientHAConf(int nn1port, int nn2port) throws Exception {
  // NOTE(review): hsf appears to be a store/filesystem factory set up
  // elsewhere in this class — confirm its type and how it consumes the file.
  hsf.setHomeDir("test-case");
  hsf.setNameNodeURL("hdfs://ns1");
  File confFile = new File(getName());
  // Inline XML config; trailing-space padding inside the literal is
  // cosmetic and ignored by the XML parser.
  String conf = "<configuration>\n             "
      + "  <property>\n                                    "
      + "    <name>dfs.nameservices</name>\n               "
      + "    <value>ns1</value>\n                          "
      + "  </property>\n                                   "
      + "  <property>\n                                    "
      + "    <name>dfs.ha.namenodes.ns1</name>\n           "
      + "    <value>nn1,nn2</value>\n                      "
      + "  </property>\n                                   "
      + "  <property>\n                                    "
      + "    <name>dfs.namenode.rpc-address.ns1.nn1</name>\n"
      + "    <value>hdfs://127.0.0.1:" + nn1port + "</value>\n"
      + "  </property>\n                                   "
      + "  <property>\n                                    "
      + "    <name>dfs.namenode.rpc-address.ns1.nn2</name>\n"
      + "    <value>hdfs://127.0.0.1:" + nn2port + "</value>\n"
      + "  </property>\n                                   "
      + "  <property>\n                                    "
      + "    <name>dfs.client.failover.proxy.provider.ns1</name>\n"
      + "    <value>" + ConfiguredFailoverProxyProvider.class.getName() + "</value>\n"
      + "  </property>\n                                   "
      + "</configuration>";
  // setConfigFile presumably writes the XML to confFile and registers it
  // with hsf — TODO confirm against its definition elsewhere in this file.
  setConfigFile(hsf, confFile, conf);
}