Java Code Examples for org.apache.hadoop.hbase.regionserver.InternalScanner#next()
The following examples show how to use org.apache.hadoop.hbase.regionserver.InternalScanner#next(). Each example is drawn from an open-source project; the source file and project it comes from are listed above the code.
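Most of the examples below share the same basic loop: InternalScanner#next(List<Cell>) fills the supplied list with the cells of the next row and returns true while more rows remain. The following is a minimal sketch of that pattern, not taken from any of the projects below; the HRegion and Scan arguments are assumed to come from surrounding test or coprocessor setup, as they do in the examples that follow.

// Minimal sketch: count the rows in a region by driving
// InternalScanner#next(List<Cell>) until it returns false.
// "region" is assumed to be an already-opened HRegion and "scan" a
// configured Scan, e.g. from a unit test or a coprocessor environment.
private long countRows(HRegion region, Scan scan) throws IOException {
  InternalScanner scanner = region.getScanner(scan);
  List<Cell> cells = new ArrayList<>();
  long rows = 0;
  try {
    boolean hasMore;
    do {
      // next() buffers the cells of the next row into "cells" and returns
      // true while the scanner still has rows to deliver.
      hasMore = scanner.next(cells);
      if (!cells.isEmpty()) {
        rows++;        // one call normally corresponds to one row
        cells.clear(); // the caller is responsible for clearing the buffer
      }
    } while (hasMore);
  } finally {
    scanner.close();
  }
  return rows;
}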
Example 1
Source File: TestFilterFromRegionSide.java From hbase with Apache License 2.0 | 6 votes |
@Test
public void testFirstSeveralCellsFilterAndBatch() throws IOException {
  Scan scan = new Scan();
  scan.setFilter(new FirstSeveralCellsFilter());
  scan.setBatch(NUM_COLS);
  InternalScanner scanner = REGION.getScanner(scan);
  List<Cell> results = new ArrayList<>();
  for (int i = 0; i < NUM_ROWS; i++) {
    results.clear();
    scanner.next(results);
    assertEquals(NUM_COLS, results.size());
    Cell cell = results.get(0);
    assertArrayEquals(ROWS[i],
      Bytes.copy(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
    assertArrayEquals(FAMILIES[0],
      Bytes.copy(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()));
    assertArrayEquals(QUALIFIERS[0],
      Bytes.copy(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()));
  }
  assertFalse(scanner.next(results));
  scanner.close();
}
Example 2
Source File: TestFilter.java From hbase with Apache License 2.0 | 6 votes |
private void verifyScanNoEarlyOut(Scan s, long expectedRows, long expectedKeys)
    throws IOException {
  InternalScanner scanner = this.region.getScanner(s);
  List<Cell> results = new ArrayList<>();
  int i = 0;
  for (boolean done = true; done; i++) {
    done = scanner.next(results);
    Arrays.sort(results.toArray(new Cell[results.size()]), CellComparator.getInstance());
    LOG.info("counter=" + i + ", " + results);
    if (results.isEmpty()) {
      break;
    }
    assertTrue("Scanned too many rows! Only expected " + expectedRows
      + " total but already scanned " + (i + 1), expectedRows > i);
    assertEquals("Expected " + expectedKeys + " keys per row but "
      + "returned " + results.size(), expectedKeys, results.size());
    results.clear();
  }
  assertEquals("Expected " + expectedRows + " rows but scanned " + i + " rows",
    expectedRows, i);
}
Example 3
Source File: TestFilter.java From hbase with Apache License 2.0 | 6 votes |
/**
 * Tests that the {@link WhileMatchFilter} works in combination with a
 * {@link Filter} that uses the {@link Filter#filterRowKey(Cell)} method.
 *
 * See HBASE-2258.
 *
 * @throws Exception
 */
@Test
public void testWhileMatchFilterWithFilterRowKey() throws Exception {
  Scan s = new Scan();
  String prefix = "testRowOne";
  WhileMatchFilter filter = new WhileMatchFilter(new PrefixFilter(Bytes.toBytes(prefix)));
  s.setFilter(filter);

  InternalScanner scanner = this.region.getScanner(s);
  while (true) {
    ArrayList<Cell> values = new ArrayList<>();
    boolean isMoreResults = scanner.next(values);
    if (!isMoreResults
        || !Bytes.toString(CellUtil.cloneRow(values.get(0))).startsWith(prefix)) {
      assertTrue("The WhileMatchFilter should now filter all remaining",
        filter.filterAllRemaining());
    }
    if (!isMoreResults) {
      break;
    }
  }
}
Example 4
Source File: HBaseTestingUtility.java From hbase with Apache License 2.0 | 6 votes |
/**
 * Do a small get/scan against one store. This is required because store
 * has no actual methods of querying itself, and relies on StoreScanner.
 */
public static List<Cell> getFromStoreFile(HStore store, Get get) throws IOException {
  Scan scan = new Scan(get);
  InternalScanner scanner = (InternalScanner) store.getScanner(scan,
    scan.getFamilyMap().get(store.getColumnFamilyDescriptor().getName()),
    // originally MultiVersionConcurrencyControl.resetThreadReadPoint() was called to set
    // readpoint 0.
    0);

  List<Cell> result = new ArrayList<>();
  scanner.next(result);
  if (!result.isEmpty()) {
    // verify that we are on the row we want:
    Cell kv = result.get(0);
    if (!CellUtil.matchingRows(kv, get.getRow())) {
      result.clear();
    }
  }
  scanner.close();
  return result;
}
Example 5
Source File: TestDependentColumnFilter.java From hbase with Apache License 2.0 | 6 votes |
/**
 * This shouldn't be confused with TestFilter#verifyScan
 * as expectedKeys is not the per row total, but the scan total
 *
 * @param s
 * @param expectedRows
 * @param expectedCells
 * @throws IOException
 */
private void verifyScan(Scan s, long expectedRows, long expectedCells)
    throws IOException {
  InternalScanner scanner = this.region.getScanner(s);
  List<Cell> results = new ArrayList<>();
  int i = 0;
  int cells = 0;
  for (boolean done = true; done; i++) {
    done = scanner.next(results);
    Arrays.sort(results.toArray(new Cell[results.size()]), CellComparatorImpl.COMPARATOR);
    LOG.info("counter=" + i + ", " + results);
    if (results.isEmpty()) {
      break;
    }
    cells += results.size();
    assertTrue("Scanned too many rows! Only expected " + expectedRows
      + " total but already scanned " + (i + 1), expectedRows > i);
    assertTrue("Expected " + expectedCells + " cells total but "
      + "already scanned " + cells, expectedCells >= cells);
    results.clear();
  }
  assertEquals("Expected " + expectedRows + " rows but scanned " + i + " rows",
    expectedRows, i);
  assertEquals("Expected " + expectedCells + " cells but scanned " + cells + " cells",
    expectedCells, cells);
}
Example 6
Source File: TestFilter.java From hbase with Apache License 2.0 | 6 votes |
@Test
public void testWhileMatchFilterWithFilterRowKeyWithReverseScan() throws Exception {
  Scan s = new Scan();
  String prefix = "testRowOne";
  WhileMatchFilter filter = new WhileMatchFilter(new PrefixFilter(Bytes.toBytes(prefix)));
  s.setFilter(filter);
  s.setReversed(true);

  InternalScanner scanner = this.region.getScanner(s);
  while (true) {
    ArrayList<Cell> values = new ArrayList<>();
    boolean isMoreResults = scanner.next(values);
    if (!isMoreResults
        || !Bytes.toString(CellUtil.cloneRow(values.get(0))).startsWith(prefix)) {
      Assert.assertTrue("The WhileMatchFilter should now filter all remaining",
        filter.filterAllRemaining());
    }
    if (!isMoreResults) {
      break;
    }
  }
  scanner.close();
}
Example 7
Source File: RowCountEndpoint.java From hbase with Apache License 2.0 | 5 votes |
/**
 * Returns a count of the rows in the region where this coprocessor is loaded.
 */
@Override
public void getRowCount(RpcController controller, CountRequest request,
    RpcCallback<CountResponse> done) {
  Scan scan = new Scan();
  scan.setFilter(new FirstKeyOnlyFilter());
  CountResponse response = null;
  InternalScanner scanner = null;
  try {
    scanner = env.getRegion().getScanner(scan);
    List<Cell> results = new ArrayList<>();
    boolean hasMore = false;
    byte[] lastRow = null;
    long count = 0;
    do {
      hasMore = scanner.next(results);
      for (Cell kv : results) {
        byte[] currentRow = CellUtil.cloneRow(kv);
        if (lastRow == null || !Bytes.equals(lastRow, currentRow)) {
          lastRow = currentRow;
          count++;
        }
      }
      results.clear();
    } while (hasMore);

    response = CountResponse.newBuilder().setCount(count).build();
  } catch (IOException ioe) {
    CoprocessorRpcUtils.setControllerException(controller, ioe);
  } finally {
    if (scanner != null) {
      try {
        scanner.close();
      } catch (IOException ignored) {}
    }
  }
  done.run(response);
}
Example 8
Source File: RowCountEndpoint.java From hbase with Apache License 2.0 | 5 votes |
/**
 * Returns a count of all KeyValues in the region where this coprocessor is loaded.
 */
@Override
public void getKeyValueCount(RpcController controller, CountRequest request,
    RpcCallback<CountResponse> done) {
  CountResponse response = null;
  InternalScanner scanner = null;
  try {
    scanner = env.getRegion().getScanner(new Scan());
    List<Cell> results = new ArrayList<>();
    boolean hasMore = false;
    long count = 0;
    do {
      hasMore = scanner.next(results);
      for (Cell kv : results) {
        count++;
      }
      results.clear();
    } while (hasMore);

    response = CountResponse.newBuilder().setCount(count).build();
  } catch (IOException ioe) {
    CoprocessorRpcUtils.setControllerException(controller, ioe);
  } finally {
    if (scanner != null) {
      try {
        scanner.close();
      } catch (IOException ignored) {}
    }
  }
  done.run(response);
}
Example 9
Source File: TestFilter.java From hbase with Apache License 2.0 | 5 votes |
private void verifyScanFull(Scan s, KeyValue[] kvs) throws IOException {
  InternalScanner scanner = this.region.getScanner(s);
  List<Cell> results = new ArrayList<>();
  int row = 0;
  int idx = 0;
  for (boolean done = true; done; row++) {
    done = scanner.next(results);
    Arrays.sort(results.toArray(new Cell[results.size()]), CellComparator.getInstance());
    if (results.isEmpty()) {
      break;
    }
    assertTrue("Scanned too many keys! Only expected " + kvs.length
      + " total but already scanned " + (results.size() + idx)
      + (results.isEmpty() ? "" : "(" + results.get(0).toString() + ")"),
      kvs.length >= idx + results.size());
    for (Cell kv : results) {
      LOG.info("row=" + row + ", result=" + kv.toString()
        + ", match=" + kvs[idx].toString());
      assertTrue("Row mismatch", CellUtil.matchingRows(kv, kvs[idx]));
      assertTrue("Family mismatch", CellUtil.matchingFamily(kv, kvs[idx]));
      assertTrue("Qualifier mismatch", CellUtil.matchingQualifier(kv, kvs[idx]));
      assertTrue("Value mismatch", CellUtil.matchingValue(kv, kvs[idx]));
      idx++;
    }
    results.clear();
  }
  LOG.info("Looked at " + row + " rows with " + idx + " keys");
  assertEquals("Expected " + kvs.length + " total keys but scanned " + idx,
    kvs.length, idx);
}
Example 10
Source File: TestRowCountEndPoint.java From BigData-In-Practice with Apache License 2.0 | 5 votes |
@Override
public void getRowCount(RpcController controller, getRowCountRequest request,
    RpcCallback<getRowCountResponse> done) {
  // Row count computed on this single region
  int result = 0;
  // Builder for the response to return
  getRowCountResponse.Builder responseBuilder = getRowCountResponse.newBuilder();
  // Count the rows
  InternalScanner scanner = null;
  try {
    Scan scan = new Scan();
    scanner = this.envi.getRegion().getScanner(scan);
    List<Cell> results = new ArrayList<Cell>();
    boolean hasMore = false;
    do {
      hasMore = scanner.next(results);
      result++;
    } while (hasMore);
  } catch (IOException ioe) {
    ioe.printStackTrace();
  } finally {
    if (scanner != null) {
      try {
        scanner.close();
      } catch (IOException ignored) {
        // nothing to do
      }
    }
  }
  responseBuilder.setRowCount(result);
  done.run(responseBuilder.build());
  return;
}
Example 11
Source File: TestScannerFromBucketCache.java From hbase with Apache License 2.0 | 5 votes |
private List<Cell> performScan(byte[] row1, byte[] fam1) throws IOException {
  Scan scan = new Scan().withStartRow(row1).addFamily(fam1).readVersions(MAX_VERSIONS);
  List<Cell> actual = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);

  boolean hasNext = scanner.next(actual);
  assertEquals(false, hasNext);
  return actual;
}
Example 12
Source File: TestMobStoreCompaction.java From hbase with Apache License 2.0 | 5 votes |
private int countReferencedMobFiles() throws IOException {
  Scan scan = new Scan();
  // Do not retrieve the mob data when scanning
  scan.setAttribute(MobConstants.MOB_SCAN_RAW, Bytes.toBytes(Boolean.TRUE));
  InternalScanner scanner = region.getScanner(scan);

  List<Cell> kvs = new ArrayList<>();
  boolean hasMore = true;
  String fileName;
  Set<String> files = new HashSet<>();
  do {
    kvs.clear();
    hasMore = scanner.next(kvs);
    for (Cell kv : kvs) {
      if (!MobUtils.isMobReferenceCell(kv)) {
        continue;
      }
      if (!MobUtils.hasValidMobRefCellValue(kv)) {
        continue;
      }
      int size = MobUtils.getMobValueLength(kv);
      if (size <= mobCellThreshold) {
        continue;
      }
      fileName = MobUtils.getMobFileName(kv);
      if (fileName.isEmpty()) {
        continue;
      }
      files.add(fileName);
      Path familyPath = MobUtils.getMobFamilyPath(conf, tableDescriptor.getTableName(),
        familyDescriptor.getNameAsString());
      assertTrue(fs.exists(new Path(familyPath, fileName)));
    }
  } while (hasMore);

  scanner.close();

  return files.size();
}
Example 13
Source File: AggregateImplementation.java From hbase with Apache License 2.0 | 4 votes |
/**
 * Gives a List containing sum of values and sum of weights.
 * It is computed for the combination of column family and column qualifier(s)
 * in the given row range as defined in the Scan object. In its current
 * implementation, it takes one column family and two column qualifiers. The
 * first qualifier is for the values column and the second qualifier (optional)
 * is for the weight column.
 */
@Override
public void getMedian(RpcController controller, AggregateRequest request,
    RpcCallback<AggregateResponse> done) {
  AggregateResponse response = null;
  InternalScanner scanner = null;
  try {
    ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
    S sumVal = null, sumWeights = null, tempVal = null, tempWeight = null;
    Scan scan = ProtobufUtil.toScan(request.getScan());
    scanner = env.getRegion().getScanner(scan);
    byte[] colFamily = scan.getFamilies()[0];
    NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
    byte[] valQualifier = null, weightQualifier = null;
    if (qualifiers != null && !qualifiers.isEmpty()) {
      valQualifier = qualifiers.pollFirst();
      // if weighted median is requested, get qualifier for the weight column
      weightQualifier = qualifiers.pollLast();
    }
    List<Cell> results = new ArrayList<>();

    boolean hasMoreRows = false;

    do {
      tempVal = null;
      tempWeight = null;
      hasMoreRows = scanner.next(results);
      int listSize = results.size();
      for (int i = 0; i < listSize; i++) {
        Cell kv = results.get(i);
        tempVal = ci.add(tempVal,
          ci.castToReturnType(ci.getValue(colFamily, valQualifier, kv)));
        if (weightQualifier != null) {
          tempWeight = ci.add(tempWeight,
            ci.castToReturnType(ci.getValue(colFamily, weightQualifier, kv)));
        }
      }
      results.clear();
      sumVal = ci.add(sumVal, tempVal);
      sumWeights = ci.add(sumWeights, tempWeight);
    } while (hasMoreRows);
    ByteString first_sumVal = ci.getProtoForPromotedType(sumVal).toByteString();
    S s = sumWeights == null ? ci.castToReturnType(ci.getMinValue()) : sumWeights;
    ByteString first_sumWeights = ci.getProtoForPromotedType(s).toByteString();
    AggregateResponse.Builder pair = AggregateResponse.newBuilder();
    pair.addFirstPart(first_sumVal);
    pair.addFirstPart(first_sumWeights);
    response = pair.build();
  } catch (IOException e) {
    CoprocessorRpcUtils.setControllerException(controller, e);
  } finally {
    if (scanner != null) {
      try {
        scanner.close();
      } catch (IOException ignored) {}
    }
  }
  done.run(response);
}
Example 14
Source File: TestScannerSelectionUsingKeyRange.java From hbase with Apache License 2.0 | 4 votes |
@Test
public void testScannerSelection() throws IOException {
  Configuration conf = TEST_UTIL.getConfiguration();
  conf.setInt("hbase.hstore.compactionThreshold", 10000);
  ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor familyDescriptor =
    new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_BYTES)
      .setBlockCacheEnabled(true)
      .setBloomFilterType(bloomType);
  TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
    new TableDescriptorBuilder.ModifyableTableDescriptor(TABLE);
  tableDescriptor.setColumnFamily(familyDescriptor);
  RegionInfo info = RegionInfoBuilder.newBuilder(TABLE).build();
  HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(),
    conf, tableDescriptor);

  for (int iFile = 0; iFile < NUM_FILES; ++iFile) {
    for (int iRow = 0; iRow < NUM_ROWS; ++iRow) {
      Put put = new Put(Bytes.toBytes("row" + iRow));
      for (int iCol = 0; iCol < NUM_COLS_PER_ROW; ++iCol) {
        put.addColumn(FAMILY_BYTES, Bytes.toBytes("col" + iCol),
          Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
      }
      region.put(put);
    }
    region.flush(true);
  }

  Scan scan = new Scan().withStartRow(Bytes.toBytes("aaa")).withStopRow(Bytes.toBytes("aaz"));
  BlockCache cache = BlockCacheFactory.createBlockCache(conf);
  InternalScanner scanner = region.getScanner(scan);
  List<Cell> results = new ArrayList<>();
  while (scanner.next(results)) {
  }
  scanner.close();
  assertEquals(0, results.size());

  if (cache instanceof LruBlockCache) {
    Set<String> accessedFiles = ((LruBlockCache) cache).getCachedFileNamesForTest();
    assertEquals(expectedCount, accessedFiles.size());
  }
  HBaseTestingUtility.closeRegionAndWAL(region);
}
Example 15
Source File: TestFilter.java From hbase with Apache License 2.0 | 4 votes |
@Test
public void testFilterListWithPrefixFilter() throws IOException {
  byte[] family = Bytes.toBytes("f1");
  byte[] qualifier = Bytes.toBytes("q1");
  TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
    new TableDescriptorBuilder.ModifyableTableDescriptor(TableName.valueOf(name.getMethodName()));
  tableDescriptor.setColumnFamily(
    new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(family));
  RegionInfo info = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build();
  HRegion testRegion = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(),
    TEST_UTIL.getConfiguration(), tableDescriptor);

  for (int i = 0; i < 5; i++) {
    Put p = new Put(Bytes.toBytes((char) ('a' + i) + "row"));
    p.setDurability(Durability.SKIP_WAL);
    p.addColumn(family, qualifier, Bytes.toBytes(String.valueOf(111 + i)));
    testRegion.put(p);
  }
  testRegion.flush(true);

  // rows starting with "b"
  PrefixFilter pf = new PrefixFilter(new byte[] {'b'});
  // rows with value of column 'q1' set to '113'
  SingleColumnValueFilter scvf = new SingleColumnValueFilter(
    family, qualifier, CompareOperator.EQUAL, Bytes.toBytes("113"));
  // combine these two with OR in a FilterList
  FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, pf, scvf);

  Scan s1 = new Scan();
  s1.setFilter(filterList);
  InternalScanner scanner = testRegion.getScanner(s1);
  List<Cell> results = new ArrayList<>();
  int resultCount = 0;
  while (scanner.next(results)) {
    resultCount++;
    byte[] row = CellUtil.cloneRow(results.get(0));
    LOG.debug("Found row: " + Bytes.toStringBinary(row));
    assertTrue(Bytes.equals(row, Bytes.toBytes("brow"))
      || Bytes.equals(row, Bytes.toBytes("crow")));
    results.clear();
  }
  assertEquals(2, resultCount);
  scanner.close();

  WAL wal = ((HRegion) testRegion).getWAL();
  ((HRegion) testRegion).close();
  wal.close();
}
Example 16
Source File: TestMultipleColumnPrefixFilter.java From hbase with Apache License 2.0 | 4 votes |
@Test
public void testMultipleColumnPrefixFilter() throws IOException {
  String family = "Family";
  TableDescriptorBuilder tableDescriptorBuilder =
    TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()));
  ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder
    .newBuilder(Bytes.toBytes(family))
    .setMaxVersions(3)
    .build();
  tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
  TableDescriptor tableDescriptor = tableDescriptorBuilder.build();
  // HRegionInfo info = new HRegionInfo(htd, null, null, false);
  RegionInfo info = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build();
  HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(),
    TEST_UTIL.getConfiguration(), tableDescriptor);

  List<String> rows = generateRandomWords(100, "row");
  List<String> columns = generateRandomWords(10000, "column");
  long maxTimestamp = 2;

  List<Cell> kvList = new ArrayList<>();

  Map<String, List<Cell>> prefixMap = new HashMap<>();

  prefixMap.put("p", new ArrayList<>());
  prefixMap.put("q", new ArrayList<>());
  prefixMap.put("s", new ArrayList<>());

  String valueString = "ValueString";

  for (String row : rows) {
    Put p = new Put(Bytes.toBytes(row));
    p.setDurability(Durability.SKIP_WAL);
    for (String column : columns) {
      for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
        KeyValue kv = KeyValueTestUtil.create(row, family, column, timestamp, valueString);
        p.add(kv);
        kvList.add(kv);
        for (String s : prefixMap.keySet()) {
          if (column.startsWith(s)) {
            prefixMap.get(s).add(kv);
          }
        }
      }
    }
    region.put(p);
  }

  MultipleColumnPrefixFilter filter;
  Scan scan = new Scan();
  scan.readAllVersions();
  byte[][] filter_prefix = new byte[2][];
  filter_prefix[0] = new byte[] {'p'};
  filter_prefix[1] = new byte[] {'q'};

  filter = new MultipleColumnPrefixFilter(filter_prefix);
  scan.setFilter(filter);
  List<Cell> results = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);
  while (scanner.next(results))
    ;
  assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size());

  HBaseTestingUtility.closeRegionAndWAL(region);
}
Example 17
Source File: TestScannerSelectionUsingTTL.java From hbase with Apache License 2.0 | 4 votes |
@Test
public void testScannerSelection() throws IOException {
  Configuration conf = TEST_UTIL.getConfiguration();
  conf.setBoolean("hbase.store.delete.expired.storefile", false);
  LruBlockCache cache = (LruBlockCache) BlockCacheFactory.createBlockCache(conf);

  TableDescriptor td = TableDescriptorBuilder.newBuilder(TABLE).setColumnFamily(
    ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_BYTES).setMaxVersions(Integer.MAX_VALUE)
      .setTimeToLive(TTL_SECONDS).build()).build();
  RegionInfo info = RegionInfoBuilder.newBuilder(TABLE).build();
  HRegion region = HBaseTestingUtility
    .createRegionAndWAL(info, TEST_UTIL.getDataTestDir(info.getEncodedName()), conf, td, cache);

  long ts = EnvironmentEdgeManager.currentTime();
  long version = 0; // make sure each new set of Put's have a new ts
  for (int iFile = 0; iFile < totalNumFiles; ++iFile) {
    if (iFile == NUM_EXPIRED_FILES) {
      Threads.sleepWithoutInterrupt(TTL_MS);
      version += TTL_MS;
    }

    for (int iRow = 0; iRow < NUM_ROWS; ++iRow) {
      Put put = new Put(Bytes.toBytes("row" + iRow));
      for (int iCol = 0; iCol < NUM_COLS_PER_ROW; ++iCol) {
        put.addColumn(FAMILY_BYTES, Bytes.toBytes("col" + iCol), ts + version,
          Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
      }
      region.put(put);
    }
    region.flush(true);
    version++;
  }

  Scan scan = new Scan().readVersions(Integer.MAX_VALUE);
  cache.clearCache();
  InternalScanner scanner = region.getScanner(scan);
  List<Cell> results = new ArrayList<>();
  final int expectedKVsPerRow = numFreshFiles * NUM_COLS_PER_ROW;
  int numReturnedRows = 0;
  LOG.info("Scanning the entire table");
  while (scanner.next(results) || results.size() > 0) {
    assertEquals(expectedKVsPerRow, results.size());
    ++numReturnedRows;
    results.clear();
  }
  assertEquals(NUM_ROWS, numReturnedRows);
  Set<String> accessedFiles = cache.getCachedFileNamesForTest();
  LOG.debug("Files accessed during scan: " + accessedFiles);

  // Exercise both compaction codepaths.
  if (explicitCompaction) {
    HStore store = region.getStore(FAMILY_BYTES);
    store.compactRecentForTestingAssumingDefaultPolicy(totalNumFiles);
  } else {
    region.compact(false);
  }

  HBaseTestingUtility.closeRegionAndWAL(region);
}
Example 18
Source File: AggregateImplementation.java From hbase with Apache License 2.0 | 4 votes |
/**
 * Gives the sum for a given combination of column qualifier and column
 * family, in the given row range as defined in the Scan object. In its
 * current implementation, it takes one column family and one column qualifier
 * (if provided). In case of null column qualifier, sum for the entire column
 * family will be returned.
 */
@Override
public void getSum(RpcController controller, AggregateRequest request,
    RpcCallback<AggregateResponse> done) {
  AggregateResponse response = null;
  InternalScanner scanner = null;
  long sum = 0L;
  try {
    ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
    S sumVal = null;
    T temp;
    Scan scan = ProtobufUtil.toScan(request.getScan());
    scanner = env.getRegion().getScanner(scan);
    byte[] colFamily = scan.getFamilies()[0];
    NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
    byte[] qualifier = null;
    if (qualifiers != null && !qualifiers.isEmpty()) {
      qualifier = qualifiers.pollFirst();
    }
    List<Cell> results = new ArrayList<>();
    boolean hasMoreRows = false;
    do {
      hasMoreRows = scanner.next(results);
      int listSize = results.size();
      for (int i = 0; i < listSize; i++) {
        temp = ci.getValue(colFamily, qualifier, results.get(i));
        if (temp != null) {
          sumVal = ci.add(sumVal, ci.castToReturnType(temp));
        }
      }
      results.clear();
    } while (hasMoreRows);
    if (sumVal != null) {
      response = AggregateResponse.newBuilder().addFirstPart(
        ci.getProtoForPromotedType(sumVal).toByteString()).build();
    }
  } catch (IOException e) {
    CoprocessorRpcUtils.setControllerException(controller, e);
  } finally {
    if (scanner != null) {
      try {
        scanner.close();
      } catch (IOException ignored) {}
    }
  }
  log.debug("Sum from this region is "
    + env.getRegion().getRegionInfo().getRegionNameAsString() + ": " + sum);
  done.run(response);
}
Example 19
Source File: AggregateImplementation.java From hbase with Apache License 2.0 | 4 votes |
/**
 * Gives the maximum for a given combination of column qualifier and column
 * family, in the given row range as defined in the Scan object. In its
 * current implementation, it takes one column family and one column qualifier
 * (if provided). In case of null column qualifier, maximum value for the
 * entire column family will be returned.
 */
@Override
public void getMax(RpcController controller, AggregateRequest request,
    RpcCallback<AggregateResponse> done) {
  InternalScanner scanner = null;
  AggregateResponse response = null;
  T max = null;
  try {
    ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
    T temp;
    Scan scan = ProtobufUtil.toScan(request.getScan());
    scanner = env.getRegion().getScanner(scan);
    List<Cell> results = new ArrayList<>();
    byte[] colFamily = scan.getFamilies()[0];
    NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
    byte[] qualifier = null;
    if (qualifiers != null && !qualifiers.isEmpty()) {
      qualifier = qualifiers.pollFirst();
    }
    // qualifier can be null.
    boolean hasMoreRows = false;
    do {
      hasMoreRows = scanner.next(results);
      int listSize = results.size();
      for (int i = 0; i < listSize; i++) {
        temp = ci.getValue(colFamily, qualifier, results.get(i));
        max = (max == null || (temp != null && ci.compare(temp, max) > 0)) ? temp : max;
      }
      results.clear();
    } while (hasMoreRows);
    if (max != null) {
      AggregateResponse.Builder builder = AggregateResponse.newBuilder();
      builder.addFirstPart(ci.getProtoForCellType(max).toByteString());
      response = builder.build();
    }
  } catch (IOException e) {
    CoprocessorRpcUtils.setControllerException(controller, e);
  } finally {
    if (scanner != null) {
      try {
        scanner.close();
      } catch (IOException ignored) {}
    }
  }
  log.info("Maximum from this region is "
    + env.getRegion().getRegionInfo().getRegionNameAsString() + ": " + max);
  done.run(response);
}
Example 20
Source File: AggregateProtocolEndPoint.java From Eagle with Apache License 2.0 | 4 votes |
/**
 * Asynchronous HBase scan read as RAW qualifier
 *
 * @param scan
 * @param listener
 * @throws Exception
 */
protected InternalReadReport asyncStreamRead(EntityDefinition ed, Scan scan,
    QualifierCreationListener listener) throws IOException {
  // _init();
  long counter = 0;
  long startTimestamp = 0;
  long stopTimestamp = 0;

  InternalScanner scanner = this.getCurrentRegion().getScanner(scan);
  List<Cell> results = new ArrayList<Cell>();
  try {
    boolean hasMoreRows; // false by default
    do {
      hasMoreRows = scanner.next(results);
      Map<String, byte[]> kvMap = new HashMap<String, byte[]>();
      if (!results.isEmpty()) {
        counter++;
        byte[] row = results.get(0).getRow();
        // if (ed.isTimeSeries()) {
        long timestamp = RowkeyBuilder.getTimestamp(row, ed);
        // Min
        if (startTimestamp == 0 || startTimestamp > timestamp) {
          startTimestamp = timestamp;
        }
        // Max
        if (stopTimestamp == 0 || stopTimestamp < timestamp) {
          stopTimestamp = timestamp;
        }
        // }

        for (Cell kv : results) {
          String qualifierName = Bytes.toString(kv.getQualifier());
          Qualifier qualifier = null;
          if (!ed.isTag(qualifierName)) {
            qualifier = ed.getQualifierNameMap().get(qualifierName);
            if (qualifier == null) {
              LOG.error("qualifier for field " + qualifierName + " not exist");
              throw new IOException(
                new NullPointerException("qualifier for field " + qualifierName + " is null"));
            }
            qualifierName = qualifier.getDisplayName();
          }
          if (kv.getValue() != null) {
            kvMap.put(qualifierName, kv.getValue());
          }
        }

        // LOG.info("DEBUG: timestamp=" + timestamp + ", keys=[" + StringUtils.join(kvMap.keySet(), ",") + "]");

        if (!kvMap.isEmpty()) {
          listener.qualifierCreated(kvMap);
        }
        results.clear();
      } else {
        if (LOG.isDebugEnabled()) {
          LOG.warn("Empty batch of KeyValue");
        }
      }
    } while (hasMoreRows);
  } catch (IOException ex) {
    LOG.error(ex.getMessage(), ex);
    throw ex;
  } finally {
    if (scanner != null) {
      scanner.close();
    }
  }

  return new InternalReadReport(counter, startTimestamp, stopTimestamp);
}