Java Code Examples for org.apache.hadoop.net.DNS#getDefaultIP()
The following examples show how to use org.apache.hadoop.net.DNS#getDefaultIP().
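Before the examples, a minimal sketch of the call itself may help. This sketch is illustrative only (the class name DefaultIpExample is not taken from any of the projects below): DNS.getDefaultIP("default") resolves the address of the machine's default network interface and throws UnknownHostException when it cannot, so callers typically wrap it in a try/catch and fall back to a placeholder value.

import java.net.UnknownHostException;

import org.apache.hadoop.net.DNS;

public class DefaultIpExample {
  public static void main(String[] args) {
    // "default" selects the machine's default network interface,
    // as in every example on this page.
    String ip = "unknownIP";
    try {
      ip = DNS.getDefaultIP("default");
    } catch (UnknownHostException e) {
      // Mirror the fallback pattern used in the examples below: keep a
      // placeholder value instead of failing when the lookup does not resolve.
      System.err.println("Could not find ip address of \"default\" interface: " + e);
    }
    System.out.println("Default IP: " + ip);
  }
}

The examples that follow use the same pattern inside HDFS itself, feeding the resolved address into rack names, storage IDs, and block pool IDs.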
Example 1
Source File: NNThroughputBenchmark.java From hadoop with Apache License 2.0
void register() throws IOException {
  // get versions from the namenode
  nsInfo = nameNodeProto.versionRequest();
  dnRegistration = new DatanodeRegistration(
      new DatanodeID(DNS.getDefaultIP("default"),
          DNS.getDefaultHost("default", "default"),
          DataNode.generateUuid(), getNodePort(dnIdx),
          DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
          DFSConfigKeys.DFS_DATANODE_HTTPS_DEFAULT_PORT,
          DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT),
      new DataStorage(nsInfo),
      new ExportedBlockKeys(), VersionInfo.getVersion());
  // register datanode
  dnRegistration = nameNodeProto.registerDatanode(dnRegistration);
  // first block reports
  storage = new DatanodeStorage(DatanodeStorage.generateUuid());
  final StorageBlockReport[] reports = {
      new StorageBlockReport(storage, BlockListAsLongs.EMPTY)
  };
  nameNodeProto.blockReport(dnRegistration,
      nameNode.getNamesystem().getBlockPoolId(), reports,
      new BlockReportContext(1, 0, System.nanoTime()));
}
Example 2
Source File: NNThroughputBenchmark.java From big-c with Apache License 2.0
void register() throws IOException {
  // get versions from the namenode
  nsInfo = nameNodeProto.versionRequest();
  dnRegistration = new DatanodeRegistration(
      new DatanodeID(DNS.getDefaultIP("default"),
          DNS.getDefaultHost("default", "default"),
          DataNode.generateUuid(), getNodePort(dnIdx),
          DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
          DFSConfigKeys.DFS_DATANODE_HTTPS_DEFAULT_PORT,
          DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT),
      new DataStorage(nsInfo),
      new ExportedBlockKeys(), VersionInfo.getVersion());
  // register datanode
  dnRegistration = nameNodeProto.registerDatanode(dnRegistration);
  // first block reports
  storage = new DatanodeStorage(DatanodeStorage.generateUuid());
  final StorageBlockReport[] reports = {
      new StorageBlockReport(storage, BlockListAsLongs.EMPTY)
  };
  nameNodeProto.blockReport(dnRegistration,
      nameNode.getNamesystem().getBlockPoolId(), reports,
      new BlockReportContext(1, 0, System.nanoTime()));
}
Example 3
Source File: DataNodeCluster.java From RDFS with Apache License 2.0
static private String getUniqueRackPrefix() {
  String ip = "unknownIP";
  try {
    ip = DNS.getDefaultIP("default");
  } catch (UnknownHostException ignored) {
    System.out.println("Could not find ip address of \"default\" interface.");
  }
  int rand = 0;
  try {
    rand = SecureRandom.getInstance("SHA1PRNG").nextInt(Integer.MAX_VALUE);
  } catch (NoSuchAlgorithmException e) {
    rand = (new Random()).nextInt(Integer.MAX_VALUE);
  }
  return "/Rack-" + rand + "-" + ip + "-" + System.currentTimeMillis();
}
Example 4
Source File: DataNodeCluster.java From hadoop-gpu with Apache License 2.0
static private String getUniqueRackPrefix() {
  String ip = "unknownIP";
  try {
    ip = DNS.getDefaultIP("default");
  } catch (UnknownHostException ignored) {
    System.out.println("Could not find ip address of \"default\" interface.");
  }
  int rand = 0;
  try {
    rand = SecureRandom.getInstance("SHA1PRNG").nextInt(Integer.MAX_VALUE);
  } catch (NoSuchAlgorithmException e) {
    rand = (new Random()).nextInt(Integer.MAX_VALUE);
  }
  return "/Rack-" + rand + "-" + ip + "-" + System.currentTimeMillis();
}
Example 5
Source File: NNStorage.java From hadoop with Apache License 2.0
/**
 * Generate new blockpoolID.
 *
 * @return new blockpoolID
 */
static String newBlockPoolID() throws UnknownHostException {
  String ip = "unknownIP";
  try {
    ip = DNS.getDefaultIP("default");
  } catch (UnknownHostException e) {
    LOG.warn("Could not find ip address of \"default\" interface.");
    throw e;
  }
  int rand = DFSUtil.getSecureRandom().nextInt(Integer.MAX_VALUE);
  String bpid = "BP-" + rand + "-" + ip + "-" + Time.now();
  return bpid;
}
Example 6
Source File: DataNodeCluster.java From hadoop with Apache License 2.0
static private String getUniqueRackPrefix() {
  String ip = "unknownIP";
  try {
    ip = DNS.getDefaultIP("default");
  } catch (UnknownHostException ignored) {
    System.out.println("Could not find ip address of \"default\" interface.");
  }
  int rand = DFSUtil.getSecureRandom().nextInt(Integer.MAX_VALUE);
  return "/Rack-" + rand + "-" + ip + "-" + Time.now();
}
Example 7
Source File: NNStorage.java From big-c with Apache License 2.0
/**
 * Generate new blockpoolID.
 *
 * @return new blockpoolID
 */
static String newBlockPoolID() throws UnknownHostException {
  String ip = "unknownIP";
  try {
    ip = DNS.getDefaultIP("default");
  } catch (UnknownHostException e) {
    LOG.warn("Could not find ip address of \"default\" interface.");
    throw e;
  }
  int rand = DFSUtil.getSecureRandom().nextInt(Integer.MAX_VALUE);
  String bpid = "BP-" + rand + "-" + ip + "-" + Time.now();
  return bpid;
}
Example 8
Source File: DataNodeCluster.java From big-c with Apache License 2.0
static private String getUniqueRackPrefix() {
  String ip = "unknownIP";
  try {
    ip = DNS.getDefaultIP("default");
  } catch (UnknownHostException ignored) {
    System.out.println("Could not find ip address of \"default\" interface.");
  }
  int rand = DFSUtil.getSecureRandom().nextInt(Integer.MAX_VALUE);
  return "/Rack-" + rand + "-" + ip + "-" + Time.now();
}
Example 9
Source File: DataNode.java From RDFS with Apache License 2.0
public static String createNewStorageId(int port) {
  /* Return
   * "DS-randInt-ipaddr-currentTimeMillis"
   * It is considered extremely rare for all these numbers to match
   * on a different machine accidentally for the following reasons:
   * a) SecureRandom(INT_MAX) is pretty much random (1 in 2 billion), and
   * b) Good chance ip address would be different, and
   * c) Even on the same machine, Datanode is designed to use different ports.
   * d) Good chance that these are started at different times.
   * For a conflict to occur all the 4 above have to match!
   * The format of this string can be changed anytime in future without
   * affecting its functionality.
   */
  String ip = "unknownIP";
  try {
    ip = DNS.getDefaultIP("default");
  } catch (UnknownHostException ignored) {
    LOG.warn("Could not find ip address of \"default\" interface.");
  }
  int rand = 0;
  try {
    rand = SecureRandom.getInstance("SHA1PRNG").nextInt(Integer.MAX_VALUE);
  } catch (NoSuchAlgorithmException e) {
    LOG.warn("Could not use SecureRandom");
    rand = R.nextInt(Integer.MAX_VALUE);
  }
  return "DS-" + rand + "-" + ip + "-" + port + "-" + System.currentTimeMillis();
}
Example 10
Source File: DataNode.java From hadoop-gpu with Apache License 2.0
public static void setNewStorageID(DatanodeRegistration dnReg) {
  /* The new storage ID has the form
   * "DS-randInt-ipaddr-port-currentTimeMillis"
   * It is considered extremely rare for all these numbers to match
   * on a different machine accidentally for the following reasons:
   * a) SecureRandom(INT_MAX) is pretty much random (1 in 2 billion), and
   * b) Good chance ip address would be different, and
   * c) Even on the same machine, Datanode is designed to use different ports.
   * d) Good chance that these are started at different times.
   * For a conflict to occur all the 4 above have to match!
   * The format of this string can be changed anytime in future without
   * affecting its functionality.
   */
  String ip = "unknownIP";
  try {
    ip = DNS.getDefaultIP("default");
  } catch (UnknownHostException ignored) {
    LOG.warn("Could not find ip address of \"default\" interface.");
  }
  int rand = 0;
  try {
    rand = SecureRandom.getInstance("SHA1PRNG").nextInt(Integer.MAX_VALUE);
  } catch (NoSuchAlgorithmException e) {
    LOG.warn("Could not use SecureRandom");
    rand = R.nextInt(Integer.MAX_VALUE);
  }
  dnReg.storageID = "DS-" + rand + "-" + ip + "-" + dnReg.getPort()
      + "-" + System.currentTimeMillis();
}