Java Code Examples for org.apache.hadoop.hdfs.web.URLConnectionFactory

The following examples show how to use org.apache.hadoop.hdfs.web.URLConnectionFactory. They are extracted from open source projects and, where available, each example is preceded by the project, source file, and license it comes from.
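
Before the project examples, here is a minimal, self-contained sketch of the pattern most of them share: build a URLConnectionFactory from a Hadoop Configuration, open a (non-SPNEGO) connection, and read the response. The target URL and the demo class name are illustrative placeholders, not taken from any of the projects below.

import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;

public class UrlConnectionFactoryDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // Factory with the default connect/read timeouts taken from conf.
    URLConnectionFactory factory =
        URLConnectionFactory.newDefaultURLConnectionFactory(conf);

    // Open a plain HTTP connection (second argument: SPNEGO disabled)
    // and print the response body. The URL is a placeholder.
    URLConnection connection =
        factory.openConnection(new URL("http://localhost:9870/jmx"), false);
    connection.connect();
    try (InputStream in = connection.getInputStream()) {
      System.out.println(new String(in.readAllBytes()));
    }
  }
}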
Example 1
@BeforeClass public static void setUp() throws Exception {
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  conf = new OzoneConfiguration();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(
      TestStorageContainerManagerHttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  connectionFactory =
      URLConnectionFactory.newDefaultURLConnectionFactory(conf);
  conf.set(OzoneConfigKeys.OZONE_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getClientSSLConfigFileName());
  conf.set(OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getServerSSLConfigFileName());
}
 
Example 2
Source Project: hadoop-ozone   Source File: TestReconUtils.java    License: Apache License 2.0
@Test
public void testMakeHttpCall() throws Exception {
  String url = "http://localhost:9874/dbCheckpoint";
  File file1 = Paths.get(folder.getRoot().getPath(), "file1")
      .toFile();
  BufferedWriter writer = new BufferedWriter(new FileWriter(
      file1.getAbsolutePath()));
  writer.write("File 1 Contents");
  writer.close();
  InputStream fileInputStream = new FileInputStream(file1);

  String contents;
  URLConnectionFactory connectionFactoryMock =
      mock(URLConnectionFactory.class);
  URLConnection urlConnectionMock = mock(URLConnection.class);
  when(urlConnectionMock.getInputStream()).thenReturn(fileInputStream);
  when(connectionFactoryMock.openConnection(any(URL.class), anyBoolean()))
      .thenReturn(urlConnectionMock);
  try (InputStream inputStream = new ReconUtils()
      .makeHttpCall(connectionFactoryMock, url, false)) {
    contents = IOUtils.toString(inputStream, Charset.defaultCharset());
  }

  assertEquals("File 1 Contents", contents);
}
 
Example 3
Source Project: hadoop-ozone   Source File: TestOzoneManagerHttpServer.java    License: Apache License 2.0
@BeforeClass public static void setUp() throws Exception {
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  conf = new OzoneConfiguration();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(
      TestOzoneManagerHttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  connectionFactory =
      URLConnectionFactory.newDefaultURLConnectionFactory(conf);
  conf.set(OzoneConfigKeys.OZONE_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getClientSSLConfigFileName());
  conf.set(OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getServerSSLConfigFileName());
}
 
Example 4
Source Project: hadoop   Source File: TestEditLogFileInputStream.java    License: Apache License 2.0
@Test
public void testReadURL() throws Exception {
  HttpURLConnection conn = mock(HttpURLConnection.class);
  doReturn(new ByteArrayInputStream(FAKE_LOG_DATA)).when(conn).getInputStream();
  doReturn(HttpURLConnection.HTTP_OK).when(conn).getResponseCode();
  doReturn(Integer.toString(FAKE_LOG_DATA.length)).when(conn).getHeaderField("Content-Length");

  URLConnectionFactory factory = mock(URLConnectionFactory.class);
  doReturn(conn).when(factory).openConnection(Mockito.<URL> any(),
      anyBoolean());

  URL url = new URL("http://localhost/fakeLog");
  EditLogInputStream elis = EditLogFileInputStream.fromUrl(factory, url,
      HdfsConstants.INVALID_TXID, HdfsConstants.INVALID_TXID, false);
  // Read the edit log and verify that we got all of the data.
  EnumMap<FSEditLogOpCodes, Holder<Integer>> counts = FSImageTestUtil
      .countEditLogOpTypes(elis);
  assertThat(counts.get(FSEditLogOpCodes.OP_ADD).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_SET_GENSTAMP_V1).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_CLOSE).held, is(1));

  // Check that length header was picked up.
  assertEquals(FAKE_LOG_DATA.length, elis.length());
  elis.close();
}
 
Example 5
Source Project: big-c   Source File: TestEditLogFileInputStream.java    License: Apache License 2.0
@Test
public void testReadURL() throws Exception {
  HttpURLConnection conn = mock(HttpURLConnection.class);
  doReturn(new ByteArrayInputStream(FAKE_LOG_DATA)).when(conn).getInputStream();
  doReturn(HttpURLConnection.HTTP_OK).when(conn).getResponseCode();
  doReturn(Integer.toString(FAKE_LOG_DATA.length)).when(conn).getHeaderField("Content-Length");

  URLConnectionFactory factory = mock(URLConnectionFactory.class);
  doReturn(conn).when(factory).openConnection(Mockito.<URL> any(),
      anyBoolean());

  URL url = new URL("http://localhost/fakeLog");
  EditLogInputStream elis = EditLogFileInputStream.fromUrl(factory, url,
      HdfsConstants.INVALID_TXID, HdfsConstants.INVALID_TXID, false);
  // Read the edit log and verify that we got all of the data.
  EnumMap<FSEditLogOpCodes, Holder<Integer>> counts = FSImageTestUtil
      .countEditLogOpTypes(elis);
  assertThat(counts.get(FSEditLogOpCodes.OP_ADD).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_SET_GENSTAMP_V1).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_CLOSE).held, is(1));

  // Check that length header was picked up.
  assertEquals(FAKE_LOG_DATA.length, elis.length());
  elis.close();
}
 
Example 6
Source Project: hadoop-ozone   Source File: ReconUtils.java    License: Apache License 2.0
/**
 * Make an HTTP GET call to the URL and return an InputStream for the response.
 * @param connectionFactory URLConnectionFactory to use.
 * @param url url to call
 * @param isSpnego is SPNEGO enabled
 * @return InputStream for the response of the HTTP call.
 * @throws IOException while making the call or reading the response.
 * @throws AuthenticationException if SPNEGO authentication fails.
 */
public InputStream makeHttpCall(URLConnectionFactory connectionFactory,
                                String url, boolean isSpnego)
    throws IOException, AuthenticationException {
  URLConnection urlConnection =
        connectionFactory.openConnection(new URL(url), isSpnego);
  urlConnection.connect();
  return urlConnection.getInputStream();
}
 
Example 7
Source Project: hadoop-ozone   Source File: OzoneManagerSnapshotProvider.java    License: Apache License 2.0
public OzoneManagerSnapshotProvider(ConfigurationSource conf,
    File omRatisSnapshotDir, List<OMNodeDetails> peerNodes) {

  LOG.info("Initializing OM Snapshot Provider");
  this.omSnapshotDir = omRatisSnapshotDir;

  this.peerNodesMap = new HashMap<>();
  for (OMNodeDetails peerNode : peerNodes) {
    this.peerNodesMap.put(peerNode.getOMNodeId(), peerNode);
  }

  this.httpPolicy = HttpConfig.getHttpPolicy(conf);
  this.spnegoEnabled = conf.get(OZONE_OM_HTTP_AUTH_TYPE, "simple")
      .equals("kerberos");

  TimeUnit connectionTimeoutUnit =
      OZONE_OM_SNAPSHOT_PROVIDER_CONNECTION_TIMEOUT_DEFAULT.getUnit();
  int connectionTimeoutMS = (int) conf.getTimeDuration(
      OZONE_OM_SNAPSHOT_PROVIDER_CONNECTION_TIMEOUT_KEY,
      OZONE_OM_SNAPSHOT_PROVIDER_CONNECTION_TIMEOUT_DEFAULT.getDuration(),
      connectionTimeoutUnit);

  TimeUnit requestTimeoutUnit =
      OZONE_OM_SNAPSHOT_PROVIDER_REQUEST_TIMEOUT_DEFAULT.getUnit();
  int requestTimeoutMS = (int) conf.getTimeDuration(
      OZONE_OM_SNAPSHOT_PROVIDER_REQUEST_TIMEOUT_KEY,
      OZONE_OM_SNAPSHOT_PROVIDER_REQUEST_TIMEOUT_DEFAULT.getDuration(),
      requestTimeoutUnit);

  connectionFactory = URLConnectionFactory
    .newDefaultURLConnectionFactory(connectionTimeoutMS, requestTimeoutMS,
          LegacyHadoopConfigurationSource.asHadoopConfiguration(conf));
}
 
Example 8
Source Project: hadoop   Source File: DFSck.java    License: Apache License 2.0
public DFSck(Configuration conf, PrintStream out) throws IOException {
  super(conf);
  this.ugi = UserGroupInformation.getCurrentUser();
  this.out = out;
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
  this.isSpnegoEnabled = UserGroupInformation.isSecurityEnabled();
}
 
Example 9
Source Project: hadoop   Source File: DelegationTokenFetcher.java    License: Apache License 2.0
/**
 * Cancel a Delegation Token.
 * @param factory the URLConnectionFactory used to open the connection
 * @param nnAddr the NameNode's address
 * @param tok the token to cancel
 * @throws IOException
 * @throws AuthenticationException
 */
static public void cancelDelegationToken(URLConnectionFactory factory,
    URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
    AuthenticationException {
  StringBuilder buf = new StringBuilder(nnAddr.toString())
      .append(CancelDelegationTokenServlet.PATH_SPEC).append("?")
      .append(CancelDelegationTokenServlet.TOKEN).append("=")
      .append(tok.encodeToUrlString());
  HttpURLConnection conn = run(factory, new URL(buf.toString()));
  conn.disconnect();
}
 
Example 10
Source Project: hadoop   Source File: QuorumJournalManager.java    License: Apache License 2.0
QuorumJournalManager(Configuration conf,
    URI uri, NamespaceInfo nsInfo,
    AsyncLogger.Factory loggerFactory) throws IOException {
  Preconditions.checkArgument(conf != null, "must be configured");

  this.conf = conf;
  this.uri = uri;
  this.nsInfo = nsInfo;
  this.loggers = new AsyncLoggerSet(createLoggers(loggerFactory));
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);

  // Configure timeouts.
  this.startSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_DEFAULT);
  this.prepareRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_DEFAULT);
  this.acceptRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_DEFAULT);
  this.finalizeSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_DEFAULT);
  this.selectInputStreamsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_DEFAULT);
  this.getJournalStateTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_DEFAULT);
  this.newEpochTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_DEFAULT);
  this.writeTxnsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_DEFAULT);
}
 
Example 11
Source Project: hadoop   Source File: TestNameNodeHttpServer.java    License: Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  conf = new Configuration();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNameNodeHttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
}
 
Example 12
Source Project: big-c   Source File: DFSck.java    License: Apache License 2.0
public DFSck(Configuration conf, PrintStream out) throws IOException {
  super(conf);
  this.ugi = UserGroupInformation.getCurrentUser();
  this.out = out;
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
  this.isSpnegoEnabled = UserGroupInformation.isSecurityEnabled();
}
 
Example 13
Source Project: big-c   Source File: DelegationTokenFetcher.java    License: Apache License 2.0
/**
 * Cancel a Delegation Token.
 * @param factory the URLConnectionFactory used to open the connection
 * @param nnAddr the NameNode's address
 * @param tok the token to cancel
 * @throws IOException
 * @throws AuthenticationException
 */
static public void cancelDelegationToken(URLConnectionFactory factory,
    URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
    AuthenticationException {
  StringBuilder buf = new StringBuilder(nnAddr.toString())
      .append(CancelDelegationTokenServlet.PATH_SPEC).append("?")
      .append(CancelDelegationTokenServlet.TOKEN).append("=")
      .append(tok.encodeToUrlString());
  HttpURLConnection conn = run(factory, new URL(buf.toString()));
  conn.disconnect();
}
 
Example 14
Source Project: big-c   Source File: QuorumJournalManager.java    License: Apache License 2.0
QuorumJournalManager(Configuration conf,
    URI uri, NamespaceInfo nsInfo,
    AsyncLogger.Factory loggerFactory) throws IOException {
  Preconditions.checkArgument(conf != null, "must be configured");

  this.conf = conf;
  this.uri = uri;
  this.nsInfo = nsInfo;
  this.loggers = new AsyncLoggerSet(createLoggers(loggerFactory));
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);

  // Configure timeouts.
  this.startSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_DEFAULT);
  this.prepareRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_DEFAULT);
  this.acceptRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_DEFAULT);
  this.finalizeSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_DEFAULT);
  this.selectInputStreamsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_DEFAULT);
  this.getJournalStateTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_DEFAULT);
  this.newEpochTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_DEFAULT);
  this.writeTxnsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_DEFAULT);
}
 
Example 15
Source Project: big-c   Source File: TestNameNodeHttpServer.java    License: Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  conf = new Configuration();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNameNodeHttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
}
 
Example 16
@Inject
public OzoneManagerServiceProviderImpl(
    OzoneConfiguration configuration,
    ReconOMMetadataManager omMetadataManager,
    ReconTaskController reconTaskController,
    ReconUtils reconUtils,
    OzoneManagerProtocol ozoneManagerClient) {

  int connectionTimeout = (int) configuration.getTimeDuration(
      RECON_OM_CONNECTION_TIMEOUT,
      RECON_OM_CONNECTION_TIMEOUT_DEFAULT, TimeUnit.MILLISECONDS);
  int connectionRequestTimeout = (int)configuration.getTimeDuration(
      RECON_OM_CONNECTION_REQUEST_TIMEOUT,
      RECON_OM_CONNECTION_REQUEST_TIMEOUT_DEFAULT, TimeUnit.MILLISECONDS);

  connectionFactory =
      URLConnectionFactory.newDefaultURLConnectionFactory(connectionTimeout,
          connectionRequestTimeout, configuration);

  String ozoneManagerHttpAddress = configuration.get(OMConfigKeys
      .OZONE_OM_HTTP_ADDRESS_KEY);

  String ozoneManagerHttpsAddress = configuration.get(OMConfigKeys
      .OZONE_OM_HTTPS_ADDRESS_KEY);

  omSnapshotDBParentDir = reconUtils.getReconDbDir(configuration,
      OZONE_RECON_OM_SNAPSHOT_DB_DIR);

  HttpConfig.Policy policy = HttpConfig.getHttpPolicy(configuration);

  omDBSnapshotUrl = "http://" + ozoneManagerHttpAddress +
      OZONE_OM_DB_CHECKPOINT_HTTP_ENDPOINT;

  if (policy.isHttpsEnabled()) {
    omDBSnapshotUrl = "https://" + ozoneManagerHttpsAddress +
        OZONE_OM_DB_CHECKPOINT_HTTP_ENDPOINT;
  }

  boolean flushParam = configuration.getBoolean(
      RECON_OM_SNAPSHOT_TASK_FLUSH_PARAM, false);

  if (flushParam) {
    omDBSnapshotUrl += "?" + OZONE_DB_CHECKPOINT_REQUEST_FLUSH + "=true";
  }

  this.reconUtils = reconUtils;
  this.omMetadataManager = omMetadataManager;
  this.reconTaskController = reconTaskController;
  this.reconTaskStatusDao = reconTaskController.getReconTaskStatusDao();
  this.ozoneManagerClient = ozoneManagerClient;
  this.configuration = configuration;
  this.metrics = OzoneManagerSyncMetrics.create();
}
 
Example 17
Source Project: hadoop   Source File: EditLogFileInputStream.java    License: Apache License 2.0
public URLLog(URLConnectionFactory connectionFactory, URL url) {
  this.connectionFactory = connectionFactory;
  this.isSpnegoEnabled = UserGroupInformation.isSecurityEnabled();
  this.url = url;
}
 
Example 18
Source Project: big-c   Source File: EditLogFileInputStream.java    License: Apache License 2.0
public URLLog(URLConnectionFactory connectionFactory, URL url) {
  this.connectionFactory = connectionFactory;
  this.isSpnegoEnabled = UserGroupInformation.isSecurityEnabled();
  this.url = url;
}
 
Example 19
Source Project: hadoop   Source File: EditLogFileInputStream.java    License: Apache License 2.0
/**
 * Open an EditLogInputStream for the given URL.
 *
 * @param connectionFactory
 *          the URLConnectionFactory used to create the connection.
 * @param url
 *          the url hosting the log
 * @param startTxId
 *          the expected starting txid
 * @param endTxId
 *          the expected ending txid
 * @param inProgress
 *          whether the log is in-progress
 * @return a stream from which edits may be read
 */
public static EditLogInputStream fromUrl(
    URLConnectionFactory connectionFactory, URL url, long startTxId,
    long endTxId, boolean inProgress) {
  return new EditLogFileInputStream(new URLLog(connectionFactory, url),
      startTxId, endTxId, inProgress);
}
 
Example 20
Source Project: big-c   Source File: EditLogFileInputStream.java    License: Apache License 2.0
/**
 * Open an EditLogInputStream for the given URL.
 *
 * @param connectionFactory
 *          the URLConnectionFactory used to create the connection.
 * @param url
 *          the url hosting the log
 * @param startTxId
 *          the expected starting txid
 * @param endTxId
 *          the expected ending txid
 * @param inProgress
 *          whether the log is in-progress
 * @return a stream from which edits may be read
 */
public static EditLogInputStream fromUrl(
    URLConnectionFactory connectionFactory, URL url, long startTxId,
    long endTxId, boolean inProgress) {
  return new EditLogFileInputStream(new URLLog(connectionFactory, url),
      startTxId, endTxId, inProgress);
}