Java Code Examples for org.apache.hadoop.hbase.client.Scan#setIsolationLevel()

The following examples show how to use org.apache.hadoop.hbase.client.Scan#setIsolationLevel(). You can vote up the examples you find useful or vote down the ones you don't, and you can open the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: SkeletonClientSideRegionScanner.java    From spliceengine with GNU Affero General Public License v3.0    6 votes
/**
 * Creates a client-side scanner over the given region's files.
 *
 * <p>The supplied {@code scan} is mutated: its isolation level is forced to
 * {@link IsolationLevel#READ_UNCOMMITTED} before any fields are captured
 * (NOTE(review): presumably so client-side reads do not block on MVCC —
 * confirm against the scanner implementation).
 *
 * @param conf        Hadoop configuration used to access region files
 * @param fs          filesystem holding the region data
 * @param rootDir     root directory of the table/region layout
 * @param htd         descriptor of the table being scanned
 * @param hri         region to scan; wrapped in a {@code SpliceHRegionInfo}
 * @param scan        scan specification (modified in place, see above)
 * @param hostAndPort host:port identity recorded for this scanner
 * @throws IOException declared for subclass/caller compatibility
 */
public SkeletonClientSideRegionScanner(Configuration conf,
                                       FileSystem fs,
                                       Path rootDir,
                                       HTableDescriptor htd,
                                       HRegionInfo hri,
                                       Scan scan, String hostAndPort) throws IOException {
    if (LOG.isDebugEnabled()) {
        SpliceLogUtils.debug(LOG, "init for regionInfo=%s, scan=%s", hri,scan);
    }
    scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED);
    // Capture collaborators; the region info is wrapped rather than stored raw.
    this.scan = scan;
    this.conf = conf;
    this.fs = fs;
    this.rootDir = rootDir;
    this.htd = htd;
    this.hostAndPort = hostAndPort;
    this.hri = new SpliceHRegionInfo(hri);
}
 
Example 2
Source File: TestRowProcessorEndpoint.java    From hbase with Apache License 2.0    5 votes
/**
 * Executes one pass of {@code scan} against {@code region}, replacing the
 * contents of {@code result} with the first batch of cells returned.
 *
 * <p>The scan is switched to {@link IsolationLevel#READ_UNCOMMITTED}
 * (NOTE(review): presumably so the test observes in-flight mutations made by
 * the row processor — confirm with the callers in this test class).
 *
 * @param region region to read from
 * @param scan   scan specification (mutated: isolation level is overwritten)
 * @param result output list; cleared before cells are added
 * @throws IOException if opening or advancing the scanner fails
 */
public static void doScan(HRegion region, Scan scan, List<Cell> result) throws IOException {
  // Set isolation before the scanner is created so it takes effect for this read.
  scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED);
  // InternalScanner is Closeable: try-with-resources replaces the manual
  // null-checked finally block and still closes on any exception path.
  try (InternalScanner scanner = region.getScanner(scan)) {
    result.clear();
    scanner.next(result);
  }
}
 
Example 3
Source File: SnapshotScanner.java    From phoenix with Apache License 2.0    5 votes
/**
 * Creates a scanner over a region opened directly from files on the given
 * filesystem (NOTE(review): presumably a restored snapshot directory, given
 * the class name — confirm against the caller that builds {@code rootDir}).
 *
 * <p>Initialization order matters here: the region is opened first, then the
 * scan attributes select one of two supported scanner modes, and only after a
 * statistics collector exists is {@code init()} called and the region
 * operation started. Do not reorder these steps.
 *
 * @param conf    Hadoop configuration for region access and connections
 * @param fs      filesystem containing the region files
 * @param rootDir root directory the region is opened under
 * @param htd     descriptor of the table being scanned
 * @param hri     info of the region to open
 * @param scan    scan specification (mutated: isolation level is overwritten)
 * @throws Throwable if opening the region, building the scanner, or creating
 *                   the statistics machinery fails
 * @throws UnsupportedOperationException for aggregate queries (neither the
 *                   ANALYZE_TABLE nor the NON_AGGREGATE_QUERY attribute set)
 */
public SnapshotScanner(Configuration conf, FileSystem fs, Path rootDir,
    TableDescriptor htd, RegionInfo hri,  Scan scan) throws Throwable{

  LOGGER.info("Creating SnapshotScanner for region: " + hri);

  // Force READ_UNCOMMITTED for this file-based read (NOTE(review): likely to
  // avoid MVCC coordination since no region server is involved — confirm).
  scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED);
  values = new ArrayList<>();
  // Open the region straight from the filesystem; no WAL/services (nulls).
  this.region = HRegion.openHRegion(conf, fs, rootDir, hri, htd, null, null, null);
  this.scan = scan;

  // Minimal coprocessor environment so region-observer scanner factories work.
  RegionCoprocessorEnvironment snapshotEnv = getSnapshotContextEnvironment(conf);

  // Collect statistics during scan if ANALYZE_TABLE attribute is set
  if (ScanUtil.isAnalyzeTable(scan)) {
    this.scanner = region.getScanner(scan);
    PhoenixConnection connection = (PhoenixConnection) ConnectionUtil.getInputConnection(conf, new Properties());
    String tableName = region.getTableDescriptor().getTableName().getNameAsString();
    // Stats rows are written to the physical SYSTEM.CATALOG-resolved stats table.
    TableName physicalTableName = SchemaUtil.getPhysicalTableName(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES, conf);
    Table table = connection.getQueryServices().getTable(physicalTableName.getName());
    StatisticsWriter statsWriter = StatisticsWriter.newWriter(connection, tableName, HConstants.LATEST_TIMESTAMP);
    statisticsCollector = new DefaultStatisticsCollector(conf, region,
            tableName, null, null, null, statsWriter, table);
  } else if (scan.getAttribute(BaseScannerRegionObserver.NON_AGGREGATE_QUERY) != null) {
    // Non-aggregate query: wrap the raw region scanner with Phoenix's
    // non-aggregate scanner; no statistics are gathered.
    RegionScannerFactory regionScannerFactory = new NonAggregateRegionScannerFactory(snapshotEnv);
    this.scanner = regionScannerFactory.getRegionScanner(scan, region.getScanner(scan));
    statisticsCollector = new NoOpStatisticsCollector();
  } else {
    /* future work : Snapshot M/R jobs for aggregate queries*/
    throw new UnsupportedOperationException("Snapshot M/R jobs not available for aggregate queries");
  }

  // Collector exists in both supported branches; safe to initialize here.
  statisticsCollector.init();
  // Mark the region as in use for the lifetime of this scanner.
  region.startRegionOperation();
}