Java Code Examples for org.apache.hadoop.net.NetUtils#getDefaultSocketFactory()

The following examples show how to use org.apache.hadoop.net.NetUtils#getDefaultSocketFactory(). They are taken from open-source projects; you can go to the original project or source file by following the links above each example.
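
Before diving into the projects below, here is a minimal, self-contained sketch of the pattern most of these examples share: read a Hadoop Configuration, obtain the SocketFactory that NetUtils resolves from it, and use that factory to open a socket with a connect timeout. The class name ReachabilityCheck and the localhost:8020 endpoint are hypothetical placeholders chosen for illustration, not taken from any of the quoted projects.

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;

import javax.net.SocketFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetUtils;

// Hypothetical standalone helper; not part of any project quoted below.
public class ReachabilityCheck {

  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();

    // Resolves the factory named by hadoop.rpc.socket.factory.class.default
    // (org.apache.hadoop.net.StandardSocketFactory unless overridden).
    SocketFactory socketFactory = NetUtils.getDefaultSocketFactory(conf);

    // Placeholder endpoint; substitute the address you actually want to probe.
    InetSocketAddress target = NetUtils.createSocketAddr("localhost", 8020);

    Socket socket = null;
    try {
      socket = socketFactory.createSocket();
      NetUtils.connect(socket, target, 1000); // 1 second connect timeout
      System.out.println("Connected to " + target);
    } finally {
      if (socket != null) {
        socket.close();
      }
    }
  }
}

The indirection through NetUtils.getDefaultSocketFactory lets a deployment swap in a custom factory (for example Hadoop's SocksSocketFactory) via the hadoop.rpc.socket.factory.class.default key without changing client code; the TestSocketFactory examples below exercise exactly that configuration point.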
Example 1
Source File: ContainerOperationClient.java    From hadoop-ozone with Apache License 2.0
public static StorageContainerLocationProtocol newContainerRpcClient(
    ConfigurationSource configSource) throws IOException {

  Class<StorageContainerLocationProtocolPB> protocol =
      StorageContainerLocationProtocolPB.class;
  Configuration conf =
      LegacyHadoopConfigurationSource.asHadoopConfiguration(configSource);
  RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine.class);
  long version = RPC.getProtocolVersion(protocol);
  InetSocketAddress scmAddress = getScmAddressForClients(configSource);
  UserGroupInformation user = UserGroupInformation.getCurrentUser();
  SocketFactory socketFactory = NetUtils.getDefaultSocketFactory(conf);
  int rpcTimeOut = Client.getRpcTimeout(conf);

  StorageContainerLocationProtocolPB rpcProxy =
      RPC.getProxy(protocol, version, scmAddress, user, conf,
          socketFactory, rpcTimeOut);

  StorageContainerLocationProtocolClientSideTranslatorPB client =
      new StorageContainerLocationProtocolClientSideTranslatorPB(rpcProxy);
  return TracingUtil.createProxy(
      client, StorageContainerLocationProtocol.class, configSource);
}
 
Example 2
Source File: AbstractHadoopProcessor.java    From localization_nifi with Apache License 2.0
protected void checkHdfsUriForTimeout(Configuration config) throws IOException {
    URI hdfsUri = FileSystem.getDefaultUri(config);
    String address = hdfsUri.getAuthority();
    int port = hdfsUri.getPort();
    if (address == null || address.isEmpty() || port < 0) {
        return;
    }
    InetSocketAddress namenode = NetUtils.createSocketAddr(address, port);
    SocketFactory socketFactory = NetUtils.getDefaultSocketFactory(config);
    Socket socket = null;
    try {
        socket = socketFactory.createSocket();
        NetUtils.connect(socket, namenode, 1000); // 1 second timeout
    } finally {
        IOUtils.closeQuietly(socket);
    }
}
 
Example 3
Source File: AbstractHdfsConnector.java    From pulsar with Apache License 2.0
protected void checkHdfsUriForTimeout(Configuration config) throws IOException {
    URI hdfsUri = FileSystem.getDefaultUri(config);
    String address = hdfsUri.getAuthority();
    int port = hdfsUri.getPort();
    if (address == null || address.isEmpty() || port < 0) {
        return;
    }
    InetSocketAddress namenode = NetUtils.createSocketAddr(address, port);
    SocketFactory socketFactory = NetUtils.getDefaultSocketFactory(config);
    Socket socket = null;
    try {
        socket = socketFactory.createSocket();
        NetUtils.connect(socket, namenode, 1000); // 1 second timeout
    } finally {
        IOUtils.closeQuietly(socket);
    }
}
 
Example 4
Source File: AbstractHdfsConnector.java    From pulsar with Apache License 2.0
protected void checkHdfsUriForTimeout(Configuration config) throws IOException {
    URI hdfsUri = FileSystem.getDefaultUri(config);
    String address = hdfsUri.getAuthority();
    int port = hdfsUri.getPort();
    if (address == null || address.isEmpty() || port < 0) {
        return;
    }
    InetSocketAddress namenode = NetUtils.createSocketAddr(address, port);
    SocketFactory socketFactory = NetUtils.getDefaultSocketFactory(config);
    Socket socket = null;
    try {
        socket = socketFactory.createSocket();
        NetUtils.connect(socket, namenode, 1000); // 1 second timeout
    } finally {
        IOUtils.closeQuietly(socket);
    }
}
 
Example 5
Source File: RecoverableRpcProxy.java    From Bats with Apache License 2.0
public RecoverableRpcProxy(String appPath, Configuration conf)
{
  this.conf = conf;
  try {
    currentUser = UserGroupInformation.getCurrentUser();
    defaultSocketFactory = NetUtils.getDefaultSocketFactory(conf);
    fsRecoveryHandler = new FSRecoveryHandler(appPath, conf);
    connect(0);
  } catch (IOException e) {
    LOG.error("Fail to create RecoverableRpcProxy", e);
    throw new RuntimeException(e);
  }
}
 
Example 6
Source File: RecoverableRpcProxy.java    From attic-apex-core with Apache License 2.0
public RecoverableRpcProxy(String appPath, Configuration conf)
{
  this.conf = conf;
  try {
    currentUser = UserGroupInformation.getCurrentUser();
    defaultSocketFactory = NetUtils.getDefaultSocketFactory(conf);
    fsRecoveryHandler = new FSRecoveryHandler(appPath, conf);
    connect(0);
  } catch (IOException e) {
    LOG.error("Fail to create RecoverableRpcProxy", e);
    throw new RuntimeException(e);
  }
}
 
Example 7
Source File: TestSocketFactory.java    From big-c with Apache License 2.0
@Test
public void testSocketFactoryAsKeyInMap() {
  Map<SocketFactory, Integer> dummyCache = new HashMap<SocketFactory, Integer>();
  int toBeCached1 = 1;
  int toBeCached2 = 2;
  Configuration conf = new Configuration();
  conf.set(CommonConfigurationKeys.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY,
      "org.apache.hadoop.ipc.TestSocketFactory$DummySocketFactory");
  final SocketFactory dummySocketFactory = NetUtils
      .getDefaultSocketFactory(conf);
  dummyCache.put(dummySocketFactory, toBeCached1);

  conf.set(CommonConfigurationKeys.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY,
      "org.apache.hadoop.net.StandardSocketFactory");
  final SocketFactory defaultSocketFactory = NetUtils
      .getDefaultSocketFactory(conf);
  dummyCache.put(defaultSocketFactory, toBeCached2);

  Assert.assertEquals("The cache contains two elements", 2, dummyCache.size());
  Assert.assertEquals("Equals of both socket factory shouldn't be same",
      defaultSocketFactory.equals(dummySocketFactory), false);

  assertSame(toBeCached2, dummyCache.remove(defaultSocketFactory));
  dummyCache.put(defaultSocketFactory, toBeCached2);
  assertSame(toBeCached1, dummyCache.remove(dummySocketFactory));
}
 
Example 8
Source File: HAServiceTarget.java    From big-c with Apache License 2.0
/**
 * @return a proxy to connect to the target HA Service.
 */
public HAServiceProtocol getProxy(Configuration conf, int timeoutMs)
    throws IOException {
  Configuration confCopy = new Configuration(conf);
  // Lower the timeout so we quickly fail to connect
  confCopy.setInt(CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 1);
  SocketFactory factory = NetUtils.getDefaultSocketFactory(confCopy);
  return new HAServiceProtocolClientSideTranslatorPB(
      getAddress(),
      confCopy, factory, timeoutMs);
}
 
Example 9
Source File: TestIsMethodSupported.java    From big-c with Apache License 2.0
@Test
public void testInterDatanodeProtocol() throws IOException {
  InterDatanodeProtocolTranslatorPB translator = 
      new InterDatanodeProtocolTranslatorPB(
          nnAddress, UserGroupInformation.getCurrentUser(), conf,
          NetUtils.getDefaultSocketFactory(conf), 0);
  //Not supported at namenode
  assertFalse(translator.isMethodSupported("initReplicaRecovery"));
  
  translator = new InterDatanodeProtocolTranslatorPB(
      dnAddress, UserGroupInformation.getCurrentUser(), conf,
      NetUtils.getDefaultSocketFactory(conf), 0);
  assertTrue(translator.isMethodSupported("initReplicaRecovery"));
}
 
Example 10
Source File: TestIsMethodSupported.java    From big-c with Apache License 2.0
@Test
public void testClientDatanodeProtocol() throws IOException {
  ClientDatanodeProtocolTranslatorPB translator = 
      new ClientDatanodeProtocolTranslatorPB(nnAddress, 
          UserGroupInformation.getCurrentUser(), conf,
      NetUtils.getDefaultSocketFactory(conf));
  //Namenode doesn't implement ClientDatanodeProtocol
  assertFalse(translator.isMethodSupported("refreshNamenodes"));
  
  translator = new ClientDatanodeProtocolTranslatorPB(
      dnAddress, UserGroupInformation.getCurrentUser(), conf,
      NetUtils.getDefaultSocketFactory(conf));
  assertTrue(translator.isMethodSupported("refreshNamenodes"));
}
 
Example 11
Source File: TestSocketFactory.java    From hadoop with Apache License 2.0
@Test
public void testSocketFactoryAsKeyInMap() {
  Map<SocketFactory, Integer> dummyCache = new HashMap<SocketFactory, Integer>();
  int toBeCached1 = 1;
  int toBeCached2 = 2;
  Configuration conf = new Configuration();
  conf.set(CommonConfigurationKeys.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY,
      "org.apache.hadoop.ipc.TestSocketFactory$DummySocketFactory");
  final SocketFactory dummySocketFactory = NetUtils
      .getDefaultSocketFactory(conf);
  dummyCache.put(dummySocketFactory, toBeCached1);

  conf.set(CommonConfigurationKeys.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY,
      "org.apache.hadoop.net.StandardSocketFactory");
  final SocketFactory defaultSocketFactory = NetUtils
      .getDefaultSocketFactory(conf);
  dummyCache.put(defaultSocketFactory, toBeCached2);

  Assert.assertEquals("The cache contains two elements", 2, dummyCache.size());
  Assert.assertEquals("Equals of both socket factory shouldn't be same",
      defaultSocketFactory.equals(dummySocketFactory), false);

  assertSame(toBeCached2, dummyCache.remove(defaultSocketFactory));
  dummyCache.put(defaultSocketFactory, toBeCached2);
  assertSame(toBeCached1, dummyCache.remove(dummySocketFactory));
}
 
Example 12
Source File: HAServiceTarget.java    From hadoop with Apache License 2.0
/**
 * @return a proxy to the ZKFC which is associated with this HA service.
 */
public ZKFCProtocol getZKFCProxy(Configuration conf, int timeoutMs)
    throws IOException {
  Configuration confCopy = new Configuration(conf);
  // Lower the timeout so we quickly fail to connect
  confCopy.setInt(CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 1);
  SocketFactory factory = NetUtils.getDefaultSocketFactory(confCopy);
  return new ZKFCProtocolClientSideTranslatorPB(
      getZKFCAddress(),
      confCopy, factory, timeoutMs);
}
 
Example 13
Source File: HAServiceTarget.java    From hadoop with Apache License 2.0
private HAServiceProtocol getProxyForAddress(Configuration conf,
    int timeoutMs, InetSocketAddress addr) throws IOException {
  Configuration confCopy = new Configuration(conf);
  // Lower the timeout so we quickly fail to connect
  confCopy.setInt(
      CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 1);
  SocketFactory factory = NetUtils.getDefaultSocketFactory(confCopy);
  return new HAServiceProtocolClientSideTranslatorPB(
      addr,
      confCopy, factory, timeoutMs);
}
 
Example 14
Source File: TestIsMethodSupported.java    From hadoop with Apache License 2.0
@Test
public void testInterDatanodeProtocol() throws IOException {
  InterDatanodeProtocolTranslatorPB translator = 
      new InterDatanodeProtocolTranslatorPB(
          nnAddress, UserGroupInformation.getCurrentUser(), conf,
          NetUtils.getDefaultSocketFactory(conf), 0);
  //Not supported at namenode
  assertFalse(translator.isMethodSupported("initReplicaRecovery"));
  
  translator = new InterDatanodeProtocolTranslatorPB(
      dnAddress, UserGroupInformation.getCurrentUser(), conf,
      NetUtils.getDefaultSocketFactory(conf), 0);
  assertTrue(translator.isMethodSupported("initReplicaRecovery"));
}
 
Example 15
Source File: TestIsMethodSupported.java    From hadoop with Apache License 2.0
@Test
public void testClientDatanodeProtocol() throws IOException {
  ClientDatanodeProtocolTranslatorPB translator = 
      new ClientDatanodeProtocolTranslatorPB(nnAddress, 
          UserGroupInformation.getCurrentUser(), conf,
      NetUtils.getDefaultSocketFactory(conf));
  //Namenode doesn't implement ClientDatanodeProtocol
  assertFalse(translator.isMethodSupported("refreshNamenodes"));
  
  translator = new ClientDatanodeProtocolTranslatorPB(
      dnAddress, UserGroupInformation.getCurrentUser(), conf,
      NetUtils.getDefaultSocketFactory(conf));
  assertTrue(translator.isMethodSupported("refreshNamenodes"));
}
 
Example 16
Source File: Client.java    From hadoop with Apache License 2.0
/**
 * Construct an IPC client with the default SocketFactory
 * @param valueClass
 * @param conf
 */
public Client(Class<? extends Writable> valueClass, Configuration conf) {
  this(valueClass, conf, NetUtils.getDefaultSocketFactory(conf));
}
 
Example 17
Source File: Client.java    From hadoop-gpu with Apache License 2.0
/**
 * Construct an IPC client with the default SocketFactory
 * @param valueClass
 * @param conf
 */
public Client(Class<? extends Writable> valueClass, Configuration conf) {
  this(valueClass, conf, NetUtils.getDefaultSocketFactory(conf));
}
 
Example 18
Source File: Client.java    From big-c with Apache License 2.0
/**
 * Construct an IPC client with the default SocketFactory
 * @param valueClass
 * @param conf
 */
public Client(Class<? extends Writable> valueClass, Configuration conf) {
  this(valueClass, conf, NetUtils.getDefaultSocketFactory(conf));
}
 
Example 19
Source File: BlockingRpcClient.java    From hbase with Apache License 2.0
/**
 * Construct an IPC client for the cluster {@code clusterId} with the default SocketFactory This
 * method is called with reflection by the RpcClientFactory to create an instance
 * @param conf configuration
 * @param clusterId the cluster id
 * @param localAddr client socket bind address.
 * @param metrics the connection metrics
 */
public BlockingRpcClient(Configuration conf, String clusterId, SocketAddress localAddr,
    MetricsConnection metrics) {
  super(conf, clusterId, localAddr, metrics);
  this.socketFactory = NetUtils.getDefaultSocketFactory(conf);
}
 
Example 20
Source File: Client.java    From RDFS with Apache License 2.0
/**
 * Construct an IPC client with the default SocketFactory
 * @param valueClass
 * @param conf
 */
public Client(Class<? extends Writable> valueClass, Configuration conf) {
  this(valueClass, conf, NetUtils.getDefaultSocketFactory(conf));
}