org.apache.hive.hcatalog.streaming.StreamingException Java Examples

The following examples show how to use org.apache.hive.hcatalog.streaming.StreamingException. They are drawn from open source projects; each example notes its source file, originating project, and license.
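Before the project examples, here is a minimal, self-contained sketch of the write path in the Hive HCatalog Streaming API, which is where StreamingException (and subclasses such as ConnectionError and SerializationError) is thrown. The metastore URI, database, table, partition, and column names below are placeholders, and the sketch assumes the target table is transactional (bucketed ORC) and that hive-hcatalog-streaming is on the classpath; adapt it to your environment.

import java.util.Arrays;

import org.apache.hive.hcatalog.streaming.DelimitedInputWriter;
import org.apache.hive.hcatalog.streaming.HiveEndPoint;
import org.apache.hive.hcatalog.streaming.StreamingConnection;
import org.apache.hive.hcatalog.streaming.StreamingException;
import org.apache.hive.hcatalog.streaming.TransactionBatch;

public class StreamingExceptionDemo {

    public static void main(String[] args) throws Exception {
        // Placeholder endpoint: metastore URI, database, table, partition values.
        HiveEndPoint endPoint = new HiveEndPoint(
                "thrift://metastore-host:9083", "default", "events", Arrays.asList("2017"));
        try {
            // newConnection, the writer constructor, fetchTransactionBatch, write,
            // commit and close all declare StreamingException (or a subclass of it).
            StreamingConnection connection = endPoint.newConnection(true);
            DelimitedInputWriter writer =
                    new DelimitedInputWriter(new String[]{"id", "msg"}, ",", endPoint);
            TransactionBatch txnBatch = connection.fetchTransactionBatch(10, writer);
            try {
                txnBatch.beginNextTransaction();
                txnBatch.write("1,hello".getBytes());
                txnBatch.commit();
            } finally {
                txnBatch.close();
                connection.close();
            }
        } catch (StreamingException e) {
            // Connection, serialization and transaction failures all surface here.
            throw new RuntimeException("Hive streaming ingest failed for " + endPoint, e);
        }
    }
}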
Example #1
Source File: HiveWriter.java    From localization_nifi with Apache License 2.0
/**
 * Writes the record data to Hive.
 *
 * @throws WriteFailure if an error occurs during the write
 * @throws SerializationError if the record cannot be serialized
 * @throws InterruptedException if the write operation is interrupted
 */
public synchronized void write(final byte[] record)
        throws WriteFailure, SerializationError, InterruptedException {
    if (closed) {
        throw new IllegalStateException("This hive streaming writer was closed " +
                "and thus no longer able to write : " + endPoint);
    }
    // write the tuple
    try {
        LOG.debug("Writing event to {}", endPoint);
        callWithTimeout(new CallRunner<Void>() {
            @Override
            public Void call() throws StreamingException, InterruptedException {
                txnBatch.write(record);
                totalRecords++;
                return null;
            }
        });
    } catch (SerializationError se) {
        throw new SerializationError(endPoint.toString() + " SerializationError", se);
    } catch (StreamingException | TimeoutException e) {
        throw new WriteFailure(endPoint, txnBatch.getCurrentTxnId(), e);
    }
}
 
Example #2
Source File: HiveWriter.java    From localization_nifi with Apache License 2.0
/**
 * Commits the current Txn if totalRecords > 0.
 * If 'rollToNext' is true, switches to the next Txn in the batch, or to a
 * new TxnBatch if the current Txn batch is exhausted.
 */
public void flush(boolean rollToNext)
        throws CommitFailure, TxnBatchFailure, TxnFailure, InterruptedException {
    // if there are no records do not call flush
    if (totalRecords <= 0) return;
    try {
        synchronized (txnBatchLock) {
            commitTxn();
            nextTxn(rollToNext);
            totalRecords = 0;
            lastUsed = System.currentTimeMillis();
        }
    } catch (StreamingException e) {
        throw new TxnFailure(txnBatch, e);
    }
}
 
Example #3
Source File: HiveWriter.java    From nifi with Apache License 2.0
/**
 * Commits the current Txn if totalRecords > 0.
 * If 'rollToNext' is true, switches to the next Txn in the batch, or to a
 * new TxnBatch if the current Txn batch is exhausted.
 */
public void flush(boolean rollToNext)
        throws CommitFailure, TxnBatchFailure, TxnFailure, InterruptedException {
    // if there are no records do not call flush
    if (totalRecords <= 0) return;
    try {
        synchronized (txnBatchLock) {
            commitTxn();
            nextTxn(rollToNext);
            totalRecords = 0;
            lastUsed = System.currentTimeMillis();
        }
    } catch (StreamingException e) {
        throw new TxnFailure(txnBatch, e);
    }
}
 
Example #4
Source File: HiveWriter.java    From nifi with Apache License 2.0
/**
 * Writes the record data to Hive.
 *
 * @throws WriteFailure if an error occurs during the write
 * @throws SerializationError if the record cannot be serialized
 * @throws InterruptedException if the write operation is interrupted
 */
public synchronized void write(final byte[] record)
        throws WriteFailure, SerializationError, InterruptedException {
    if (closed) {
        throw new IllegalStateException("This hive streaming writer was closed " +
                "and thus no longer able to write : " + endPoint);
    }
    // write the tuple
    try {
        LOG.debug("Writing event to {}", endPoint);
        callWithTimeout(new CallRunner<Void>() {
            @Override
            public Void call() throws StreamingException, InterruptedException {
                txnBatch.write(record);
                totalRecords++;
                return null;
            }
        });
    } catch (SerializationError se) {
        throw new SerializationError(endPoint.toString() + " SerializationError", se);
    } catch (StreamingException | TimeoutException e) {
        throw new WriteFailure(endPoint, txnBatch.getCurrentTxnId(), e);
    }
}
 
Example #5
Source File: HiveWriter.java    From nifi with Apache License 2.0
protected RecordWriter getRecordWriter(HiveEndPoint endPoint, UserGroupInformation ugi, HiveConf hiveConf) throws StreamingException, IOException, InterruptedException {
    if (ugi == null) {
        return new StrictJsonWriter(endPoint, hiveConf);
    } else {
        try {
            return ugi.doAs((PrivilegedExceptionAction<StrictJsonWriter>) () -> new StrictJsonWriter(endPoint, hiveConf));
        } catch (UndeclaredThrowableException e) {
            Throwable cause = e.getCause();
            if (cause instanceof StreamingException) {
                throw (StreamingException) cause;
            } else {
                throw e;
            }
        }
    }
}
 
Example #6
Source File: HiveTarget.java    From datacollector with Apache License 2.0
@Override
public StreamingConnection load(HiveEndPoint endPoint) throws StageException {
  StreamingConnection connection;
  try {
     connection = endPoint.newConnection(autoCreatePartitions, hiveConf, loginUgi);
  } catch (StreamingException | InterruptedException e) {
    throw new StageException(Errors.HIVE_09, e.toString(), e);
  }
  return connection;
}
 
Example #7
Source File: HiveWriterTest.java    From nifi with Apache License 2.0
@Test(expected = HiveWriter.ConnectFailure.class)
public void testRecordWriterStreamingException() throws Exception {
    recordWriterCallable = mock(Callable.class);
    StreamingException streamingException = new StreamingException("Test Exception");
    when(recordWriterCallable.call()).thenThrow(streamingException);
    try {
        initWriter();
    } catch (HiveWriter.ConnectFailure e) {
        assertEquals(streamingException, e.getCause());
        throw e;
    }
}
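Where the NiFi test above relies on test-harness helpers (initWriter, recordWriterCallable), the same failure path can be exercised directly against the hcatalog interfaces. The following is a minimal sketch, assuming JUnit 4 and Mockito are available; the class name, test name, and message are made up for illustration.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;

import org.apache.hive.hcatalog.streaming.StreamingException;
import org.apache.hive.hcatalog.streaming.TransactionBatch;
import org.junit.Test;

public class StreamingExceptionPropagationTest {

    @Test
    public void writeFailureSurfacesAsStreamingException() throws Exception {
        // Stub a transaction batch whose write(byte[]) fails with a StreamingException.
        TransactionBatch txnBatch = mock(TransactionBatch.class);
        doThrow(new StreamingException("simulated write failure"))
                .when(txnBatch).write(any(byte[].class));

        try {
            txnBatch.write("some record".getBytes());
            fail("Expected a StreamingException");
        } catch (StreamingException e) {
            assertEquals("simulated write failure", e.getMessage());
        }
    }
}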
 
Example #8
Source File: HiveWriter.java    From nifi with Apache License 2.0
/**
 * If the current txnBatch is exhausted, closes it and, when 'rollToNext' is true,
 * fetches a new txnBatch; otherwise, when 'rollToNext' is true, begins the next
 * transaction in the current batch.
 * @param rollToNext Whether to roll to the next transaction or transaction batch
 */
protected void nextTxn(boolean rollToNext) throws StreamingException, InterruptedException, TxnBatchFailure {
    if (txnBatch.remainingTransactions() == 0) {
        closeTxnBatch();
        txnBatch = null;
        if (rollToNext) {
            txnBatch = nextTxnBatch(recordWriter);
        }
    } else if (rollToNext) {
        LOG.debug("Switching to next Txn for {}", endPoint);
        txnBatch.beginNextTransaction(); // does not block
    }
}
 
Example #9
Source File: HiveWriter.java    From nifi with Apache License 2.0
/**
 * Aborts the current Txn and switches to the next one.
 * @throws StreamingException if a new transaction batch cannot be obtained or the switch to the next Txn fails
 */
public void abort() throws StreamingException, TxnBatchFailure, InterruptedException {
    synchronized (txnBatchLock) {
        abortTxn();
        nextTxn(true); // roll to next
    }
}
 
Example #10
Source File: HiveWriter.java    From nifi with Apache License 2.0
protected TransactionBatch nextTxnBatch(final RecordWriter recordWriter)
        throws InterruptedException, TxnBatchFailure {
    LOG.debug("Fetching new Txn Batch for {}", endPoint);
    TransactionBatch batch = null;
    try {
        batch = callWithTimeout(() -> {
            return connection.fetchTransactionBatch(txnsPerBatch, recordWriter); // could block
        });
        batch.beginNextTransaction();
        LOG.debug("Acquired {}. Switching to first txn", batch);
    } catch (TimeoutException | StreamingException e) {
        throw new TxnBatchFailure(endPoint, e);
    }
    return batch;
}
 
Example #11
Source File: HiveWriter.java    From nifi with Apache License 2.0
protected StreamingConnection newConnection(HiveEndPoint endPoint, boolean autoCreatePartitions, HiveConf conf, UserGroupInformation ugi) throws InterruptedException, ConnectFailure {
    try {
        return callWithTimeout(() -> {
            return endPoint.newConnection(autoCreatePartitions, conf, ugi); // could block
        });
    } catch (StreamingException | TimeoutException e) {
        throw new ConnectFailure(endPoint, e);
    }
}
 
Example #12
Source File: HiveWriter.java    From nifi with Apache License 2.0
protected void commitTxn() throws CommitFailure, InterruptedException {
    LOG.debug("Committing Txn id {} to {}", txnBatch.getCurrentTxnId(), endPoint);
    try {
        callWithTimeout(new CallRunner<Void>() {
            @Override
            public Void call() throws Exception {
                txnBatch.commit(); // could block
                return null;
            }
        });
    } catch (StreamingException | TimeoutException e) {
        throw new CommitFailure(endPoint, txnBatch.getCurrentTxnId(), e);
    }
}
 
Example #13
Source File: PutHiveStreaming.java    From nifi with Apache License 2.0
/**
 * Abort current Txn on all writers
 */
private void abortAllWriters(Map<HiveEndPoint, HiveWriter> writers) throws InterruptedException, StreamingException, HiveWriter.TxnBatchFailure {
    for (Map.Entry<HiveEndPoint, HiveWriter> entry : writers.entrySet()) {
        try {
            entry.getValue().abort();
        } catch (Exception e) {
            getLogger().error("Failed to abort hive transaction batch, HiveEndPoint " + entry.getValue() + " due to exception ", e);
        }
    }
}
 
Example #14
Source File: PutHiveStreaming.java    From localization_nifi with Apache License 2.0
/**
 * Abort current Txn on all writers
 */
private void abortAllWriters() throws InterruptedException, StreamingException, HiveWriter.TxnBatchFailure {
    for (Map.Entry<HiveEndPoint, HiveWriter> entry : allWriters.entrySet()) {
        try {
            entry.getValue().abort();
        } catch (Exception e) {
            getLogger().error("Failed to abort hive transaction batch, HiveEndPoint " + entry.getValue() + " due to exception ", e);
        }
    }
}
 
Example #15
Source File: HiveWriter.java    From localization_nifi with Apache License 2.0
/**
 * Executes the callable on a separate thread and waits up to the configured
 * callTimeout (in milliseconds) for it to complete. On timeout, cancels the
 * callable and throws a TimeoutException.
 */
private <T> T callWithTimeout(final CallRunner<T> callRunner)
        throws TimeoutException, StreamingException, InterruptedException {
    Future<T> future = callTimeoutPool.submit(() -> {
        if (ugi == null) {
            return callRunner.call();
        }
        return ugi.doAs((PrivilegedExceptionAction<T>) () -> callRunner.call());
    });
    try {
        if (callTimeout > 0) {
            return future.get(callTimeout, TimeUnit.MILLISECONDS);
        } else {
            return future.get();
        }
    } catch (TimeoutException eT) {
        future.cancel(true);
        throw eT;
    } catch (ExecutionException e1) {
        Throwable cause = e1.getCause();
        if (cause instanceof IOException) {
            throw new StreamingIOFailure("I/O Failure", (IOException) cause);
        } else if (cause instanceof StreamingException) {
            throw (StreamingException) cause;
        } else if (cause instanceof InterruptedException) {
            throw (InterruptedException) cause;
        } else if (cause instanceof RuntimeException) {
            throw (RuntimeException) cause;
        } else if (cause instanceof TimeoutException) {
            throw new StreamingException("Operation Timed Out.", (TimeoutException) cause);
        } else {
            throw new RuntimeException(e1);
        }
    }
}
 
Example #16
Source File: HiveWriter.java    From localization_nifi with Apache License 2.0
/**
 * If the current txnBatch is exhausted, closes it and, when 'rollToNext' is true,
 * fetches a new txnBatch; otherwise, when 'rollToNext' is true, begins the next
 * transaction in the current batch.
 * @param rollToNext Whether to roll to the next transaction or transaction batch
 */
protected void nextTxn(boolean rollToNext) throws StreamingException, InterruptedException, TxnBatchFailure {
    if (txnBatch.remainingTransactions() == 0) {
        closeTxnBatch();
        txnBatch = null;
        if (rollToNext) {
            txnBatch = nextTxnBatch(recordWriter);
        }
    } else if (rollToNext) {
        LOG.debug("Switching to next Txn for {}", endPoint);
        txnBatch.beginNextTransaction(); // does not block
    }
}
 
Example #17
Source File: HiveWriter.java    From localization_nifi with Apache License 2.0
/**
 * Aborts the current Txn and switches to the next one.
 * @throws StreamingException if a new transaction batch cannot be obtained or the switch to the next Txn fails
 */
public void abort() throws StreamingException, TxnBatchFailure, InterruptedException {
    synchronized (txnBatchLock) {
        abortTxn();
        nextTxn(true); // roll to next
    }
}
 
Example #18
Source File: HiveWriter.java    From localization_nifi with Apache License 2.0
protected TransactionBatch nextTxnBatch(final RecordWriter recordWriter)
        throws InterruptedException, TxnBatchFailure {
    LOG.debug("Fetching new Txn Batch for {}", endPoint);
    TransactionBatch batch = null;
    try {
        batch = callWithTimeout(() -> {
            return connection.fetchTransactionBatch(txnsPerBatch, recordWriter); // could block
        });
        batch.beginNextTransaction();
        LOG.debug("Acquired {}. Switching to first txn", batch);
    } catch (TimeoutException | StreamingException e) {
        throw new TxnBatchFailure(endPoint, e);
    }
    return batch;
}
 
Example #19
Source File: HiveWriter.java    From localization_nifi with Apache License 2.0
protected StreamingConnection newConnection(HiveEndPoint endPoint, boolean autoCreatePartitions, HiveConf conf, UserGroupInformation ugi) throws InterruptedException, ConnectFailure {
    try {
        return callWithTimeout(() -> {
            return endPoint.newConnection(autoCreatePartitions, conf, ugi); // could block
        });
    } catch (StreamingException | TimeoutException e) {
        throw new ConnectFailure(endPoint, e);
    }
}
 
Example #20
Source File: HiveWriter.java    From localization_nifi with Apache License 2.0
protected void commitTxn() throws CommitFailure, InterruptedException {
    LOG.debug("Committing Txn id {} to {}", txnBatch.getCurrentTxnId(), endPoint);
    try {
        callWithTimeout(new CallRunner<Void>() {
            @Override
            public Void call() throws Exception {
                txnBatch.commit(); // could block
                return null;
            }
        });
    } catch (StreamingException | TimeoutException e) {
        throw new CommitFailure(endPoint, txnBatch.getCurrentTxnId(), e);
    }
}
 
Example #21
Source File: HiveWriter.java    From localization_nifi with Apache License 2.0
protected RecordWriter getRecordWriter(HiveEndPoint endPoint, UserGroupInformation ugi, HiveConf hiveConf) throws StreamingException, IOException, InterruptedException {
    if (ugi == null) {
        return new StrictJsonWriter(endPoint, hiveConf);
    } else {
        return ugi.doAs((PrivilegedExceptionAction<StrictJsonWriter>) () -> new StrictJsonWriter(endPoint, hiveConf));
    }
}
 
Example #22
Source File: StreamlineHiveMapper.java    From streamline with Apache License 2.0
@Override
public void write(TransactionBatch transactionBatch, Tuple tuple) throws StreamingException, IOException, InterruptedException {
    transactionBatch.write(mapRecord(tuple));
}
 
Example #23
Source File: StreamlineHiveMapper.java    From streamline with Apache License 2.0
@Override
public RecordWriter createRecordWriter(HiveEndPoint hiveEndPoint) throws StreamingException, IOException, ClassNotFoundException {
  List<String> result = fields.stream().map(String::toLowerCase).collect(Collectors.toList());
  return new DelimitedInputWriter(result.toArray(new String[0]), fieldDelimiter, hiveEndPoint);
}
 
Example #24
Source File: HiveTarget.java    From datacollector with Apache License 2.0
private TransactionBatch getBatch(int batchSize, HiveEndPoint endPoint) throws InterruptedException,
    StreamingException, ExecutionException {
  return hiveConnectionPool.get(endPoint).fetchTransactionBatch(batchSize, recordWriterPool.get(endPoint));
}
 
Example #25
Source File: TestPutHiveStreaming.java    From localization_nifi with Apache License 2.0
@Override
protected void nextTxn(boolean rollToNext) throws StreamingException, InterruptedException, TxnBatchFailure {
    // Empty
}
 
Example #26
Source File: TestPutHiveStreaming.java    From localization_nifi with Apache License 2.0
@Override
protected RecordWriter getRecordWriter(HiveEndPoint endPoint, UserGroupInformation ugi, HiveConf conf) throws StreamingException {
    assertEquals(hiveConf, conf);
    return mock(RecordWriter.class);
}
 
Example #27
Source File: TestPutHiveStreaming.java    From nifi with Apache License 2.0
@Override
protected RecordWriter getRecordWriter(HiveEndPoint endPoint, UserGroupInformation ugi, HiveConf conf) throws StreamingException {
    assertEquals(hiveConf, conf);
    return mock(RecordWriter.class);
}
 
Example #28
Source File: TestPutHiveStreaming.java    From nifi with Apache License 2.0
@Override
protected void nextTxn(boolean rollToNext) throws StreamingException, InterruptedException, TxnBatchFailure {
    // Empty
}
 
Example #29
Source File: TestPutHiveStreaming.java    From localization_nifi with Apache License 2.0
@Override
public void abort() throws StreamingException, TxnBatchFailure, InterruptedException {

}
 
Example #30
Source File: TestPutHiveStreaming.java    From nifi with Apache License 2.0
@Override
public void abort() throws StreamingException, TxnBatchFailure, InterruptedException {

}