Java Code Examples for org.apache.hadoop.hbase.util.Bytes#toBytesBinary()

The following examples show how to use org.apache.hadoop.hbase.util.Bytes#toBytesBinary(). You can vote up the examples you like or vote down the ones you don't, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: MockHTable.java    From foxtrot with Apache License 2.0 6 votes vote down vote up
/**
 * Looks up the most recent value stored under the given row and
 * "family:qualifier" column. Useful for making assertions in tests.
 *
 * @param rowid  rowid of the data to read (Bytes binary notation)
 * @param column family:qualifier of the data to read
 * @return latest value, or null if the row or column does not exist
 */
public byte[] read(String rowid, String column) {
    NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowData =
            data.get(Bytes.toBytesBinary(rowid));
    if (rowData == null) {
        return null;
    }
    String[] familyAndQualifier = split(column);
    byte[] family = Bytes.toBytesBinary(familyAndQualifier[0]);
    byte[] qualifier = Bytes.toBytesBinary(familyAndQualifier[1]);
    if (!rowData.containsKey(family)) {
        return null;
    }
    NavigableMap<byte[], NavigableMap<Long, byte[]>> columns = rowData.get(family);
    if (!columns.containsKey(qualifier)) {
        return null;
    }
    // Versions are keyed by timestamp; lastEntry() is the newest one.
    return columns.get(qualifier).lastEntry().getValue();
}
 
Example 2
Source File: MockHTable.java    From foxtrot with Apache License 2.0 6 votes vote down vote up
/**
 * Helper for pre-loaders: records one value into the mock table's backing data.
 *
 * @param ret    table to load into
 * @param row    rowid
 * @param column family:qualifier encoded value
 * @param val    value
 */
private static void put(MockHTable ret, String row, String column, String val) {
    String[] familyAndQualifier = split(column);
    byte[] family = Bytes.toBytesBinary(familyAndQualifier[0]);
    byte[] qualifier = Bytes.toBytesBinary(familyAndQualifier[1]);
    // Walk (or lazily create) the nested row -> family -> qualifier -> versions maps.
    NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> families =
            ret.forceFind(ret.data, Bytes.toBytesBinary(row),
                    new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>(
                            Bytes.BYTES_COMPARATOR));
    NavigableMap<byte[], NavigableMap<Long, byte[]>> qualifiers =
            ret.forceFind(families, family,
                    new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
    NavigableMap<Long, byte[]> versions =
            ret.forceFind(qualifiers, qualifier, new TreeMap<Long, byte[]>());
    // Versions are keyed by wall-clock timestamp.
    versions.put(System.currentTimeMillis(), Bytes.toBytesBinary(val));
}
 
Example 3
Source File: IntegrationTestBigLinkedList.java    From hbase with Apache License 2.0 6 votes vote down vote up
/**
 * Deletes the single node whose row key (in Bytes binary notation) is given
 * as the only command-line argument.
 *
 * @param args expects exactly one element: the row key to delete
 * @return 0 (also after printing usage on wrong argument count)
 */
@Override
public int run(String[] args) throws Exception {
  if (args.length != 1) {
    System.out.println("Usage : " + Delete.class.getSimpleName() + " <node to delete>");
    return 0;
  }
  byte[] rowKey = Bytes.toBytesBinary(args[0]);

  org.apache.hadoop.hbase.client.Delete delete =
      new org.apache.hadoop.hbase.client.Delete(rowKey);

  // Connection and table are closed automatically after the delete.
  try (Connection connection = ConnectionFactory.createConnection(getConf());
      Table table = connection.getTable(getTableName(getConf()))) {
    table.delete(delete);
  }

  System.out.println("Delete successful");
  return 0;
}
 
Example 4
Source File: HbaseUtil.java    From DataLink with Apache License 2.0 5 votes vote down vote up
/**
 * Reads the configured end row key and decodes it from Bytes binary notation.
 * A blank value means "scan to the end of the table" (empty byte array).
 */
public static byte[] convertInnerEndRowkey(Configuration configuration) {
    String endRowkey = configuration.getString(Key.END_ROWKEY);
    return StringUtils.isBlank(endRowkey)
            ? HConstants.EMPTY_BYTE_ARRAY
            : Bytes.toBytesBinary(endRowkey);
}
 
Example 5
Source File: CreateTableChange.java    From pinpoint with Apache License 2.0 5 votes vote down vote up
/**
 * Decodes every configured split key from Bytes binary notation.
 *
 * @return one byte[] per configured split key, in configuration order
 */
@Override
public byte[][] getSplitKeys() {
    final int count = splitKeys.size();
    byte[][] decoded = new byte[count][];
    for (int index = 0; index < count; index++) {
        decoded[index] = Bytes.toBytesBinary(splitKeys.get(index));
    }
    return decoded;
}
 
Example 6
Source File: GroupingTableMapper.java    From hbase with Apache License 2.0 5 votes vote down vote up
/**
 * Create a key by concatenating multiple column values, separated by single
 * spaces, then encoding the result from Bytes binary notation.
 * <p>
 * Override this function in order to produce different types of keys.
 *
 * @param vals  The current key/values.
 * @return A key generated by concatenating multiple column values, or null
 *         when {@code vals} is null.
 */
protected ImmutableBytesWritable createGroupKey(byte[][] vals) {
  if (vals == null) {
    return null;
  }
  StringBuilder joined = new StringBuilder();
  String separator = "";
  for (byte[] val : vals) {
    // Prefix-separator idiom: empty before the first value, " " afterwards.
    joined.append(separator).append(Bytes.toString(val));
    separator = " ";
  }
  return new ImmutableBytesWritable(Bytes.toBytesBinary(joined.toString()));
}
 
Example 7
Source File: SimpleTotalOrderPartitioner.java    From hbase with Apache License 2.0 5 votes vote down vote up
/**
 * Reads a key from the configuration, preferring the base64-encoded setting
 * and falling back (with a deprecation warning) to the legacy Bytes-binary one.
 *
 * @return the decoded key bytes, or null when neither setting is present
 */
private static byte[] getKeyFromConf(Configuration conf,
    String base64Key, String deprecatedKey) {
  final String base64Value = conf.get(base64Key);
  if (base64Value != null) {
    return Base64.getDecoder().decode(base64Value);
  }
  final String legacyValue = conf.get(deprecatedKey);
  if (legacyValue == null) {
    return null;
  }
  LOG.warn("Using deprecated configuration " + deprecatedKey +
      " - please use static accessor methods instead.");
  return Bytes.toBytesBinary(legacyValue);
}
 
Example 8
Source File: GroupingTableMap.java    From hbase with Apache License 2.0 5 votes vote down vote up
/**
 * Create a key by concatenating multiple column values with single-space
 * separators, then decoding the result from Bytes binary notation.
 * Override this function in order to produce different types of keys.
 *
 * @param vals column values to concatenate; may be null
 * @return key generated by concatenating multiple column values, or null
 *         when {@code vals} is null
 */
protected ImmutableBytesWritable createGroupKey(byte[][] vals) {
  if (vals == null) {
    return null;
  }
  StringBuilder key = new StringBuilder();
  for (int idx = 0; idx < vals.length; idx++) {
    if (idx != 0) {
      key.append(' ');
    }
    key.append(Bytes.toString(vals[idx]));
  }
  return new ImmutableBytesWritable(Bytes.toBytesBinary(key.toString()));
}
 
Example 9
Source File: MetaBrowser.java    From hbase with Apache License 2.0 5 votes vote down vote up
/**
 * Resolves the scan-start row key from the request, decoding it from
 * Bytes binary notation; absent parameter yields null.
 */
private static byte[] resolveScanStart(final HttpServletRequest request) {
  // TODO: handle replicas that fall between the last rowkey and pagination limit.
  final String startParam = resolveRequestParameter(request, SCAN_START_PARAM);
  return startParam == null ? null : Bytes.toBytesBinary(startParam);
}
 
Example 10
Source File: HbaseUtil.java    From DataLink with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a row key string to bytes. Binary row keys use HBase's escaped
 * \xNN notation; plain ones are encoded directly via Bytes.toBytes.
 */
private static byte[] stringToBytes(String rowkey, boolean isBinaryRowkey) {
    return isBinaryRowkey ? Bytes.toBytesBinary(rowkey) : Bytes.toBytes(rowkey);
}
 
Example 11
Source File: HbaseUtil.java    From DataLink with Apache License 2.0 5 votes vote down vote up
/**
 * Decodes the configured start row key from Bytes binary notation.
 * <p>
 * Note: unlike convertUserStartRowkey — which honors isBinaryRowkey and is
 * only used the first time a user-supplied String row key is converted to
 * bytes — this method assumes the row key was written back into the
 * configuration in binary-escaped form after splitting.
 */
public static byte[] convertInnerStartRowkey(Configuration configuration) {
    String startRowkey = configuration.getString(Key.START_ROWKEY);
    return StringUtils.isBlank(startRowkey)
            ? HConstants.EMPTY_BYTE_ARRAY
            : Bytes.toBytesBinary(startRowkey);
}
 
Example 12
Source File: HFilePrettyPrinter.java    From hbase with Apache License 2.0 4 votes vote down vote up
/**
 * Parses the command-line options into the printer's flag fields and target file list.
 *
 * @param args command-line arguments; an empty array prints usage and returns false
 * @return true when parsing succeeded and processing should continue, false to stop
 * @throws ParseException if the options cannot be parsed
 * @throws IOException if region store files cannot be listed
 */
public boolean parseOptions(String args[]) throws ParseException,
    IOException {
  if (args.length == 0) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("hfile", options, true);
    return false;
  }
  CommandLineParser parser = new PosixParser();
  CommandLine cmd = parser.parse(options, args);

  // Boolean flags controlling what gets printed / checked.
  verbose = cmd.hasOption("v");
  printValue = cmd.hasOption("p");
  printKey = cmd.hasOption("e") || printValue; // printing values implies printing keys
  shouldPrintMeta = cmd.hasOption("m");
  printBlockIndex = cmd.hasOption("b");
  printBlockHeaders = cmd.hasOption("h");
  printStats = cmd.hasOption("s");
  checkRow = cmd.hasOption("k");
  checkFamily = cmd.hasOption("a");
  checkMobIntegrity = cmd.hasOption("i");

  // -f: a single explicit HFile path.
  if (cmd.hasOption("f")) {
    files.add(new Path(cmd.getOptionValue("f")));
  }

  // -w: seek directly to a row key given in Bytes binary notation.
  if (cmd.hasOption("w")) {
    String key = cmd.getOptionValue("w");
    if (key != null && key.length() != 0) {
      row = Bytes.toBytesBinary(key);
      isSeekToRow = true;
    } else {
      // NOTE(review): this path exits the JVM rather than returning false
      // like the other error paths — confirm that is intentional.
      err.println("Invalid row is specified.");
      System.exit(-1);
    }
  }

  // -r: expand a region name into all of that region's store files.
  if (cmd.hasOption("r")) {
    String regionName = cmd.getOptionValue("r");
    byte[] rn = Bytes.toBytes(regionName);
    byte[][] hri = RegionInfo.parseRegionName(rn);
    Path rootDir = CommonFSUtils.getRootDir(getConf());
    Path tableDir = CommonFSUtils.getTableDir(rootDir, TableName.valueOf(hri[0]));
    String enc = RegionInfo.encodeRegionName(rn);
    Path regionDir = new Path(tableDir, enc);
    if (verbose)
      out.println("region dir -> " + regionDir);
    List<Path> regionFiles = HFile.getStoreFiles(FileSystem.get(getConf()),
        regionDir);
    if (verbose)
      out.println("Number of region files found -> "
          + regionFiles.size());
    if (verbose) {
      int i = 1;
      for (Path p : regionFiles) {
        if (verbose)
          out.println("Found file[" + i++ + "] -> " + p);
      }
    }
    files.addAll(regionFiles);
  }

  if(checkMobIntegrity) {
    if (verbose) {
      System.out.println("checkMobIntegrity is enabled");
    }
    // Lazily initialized only when MOB integrity checking is requested.
    mobFileLocations = new HashMap<>();
  }

  // Any remaining positional arguments are treated as HFile paths.
  cmd.getArgList().forEach((file) -> files.add(new Path(file)));

  return true;
}
 
Example 13
Source File: TestFuzzyRowFilterEndToEnd.java    From hbase with Apache License 2.0 4 votes vote down vote up
/**
 * Verifies FuzzyRowFilter with an all-fixed-bits mask: only the rows that
 * match the fuzzy key byte-for-byte over the mask length should be returned.
 * Also covers HBASE-18617: the filter must not mutate the caller's key/mask
 * arrays.
 *
 * <p>Fix over the original: the ResultScanner was never closed; it is now
 * managed with try-with-resources.
 */
@Test
public void testAllFixedBits() throws IOException {
  String cf = "f";
  String cq = "q";

  Table ht =
      TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(cf), Integer.MAX_VALUE);
  // Load data
  String[] rows = new String[] { "\\x9C\\x00\\x044\\x00\\x00\\x00\\x00",
      "\\x9C\\x00\\x044\\x01\\x00\\x00\\x00", "\\x9C\\x00\\x044\\x00\\x01\\x00\\x00",
      "\\x9B\\x00\\x044e\\x9B\\x02\\xBB", "\\x9C\\x00\\x044\\x00\\x00\\x01\\x00",
      "\\x9C\\x00\\x044\\x00\\x01\\x00\\x01", "\\x9B\\x00\\x044e\\xBB\\xB2\\xBB", };

  for (int i = 0; i < rows.length; i++) {
    Put p = new Put(Bytes.toBytesBinary(rows[i]));
    p.addColumn(Bytes.toBytes(cf), Bytes.toBytes(cq), Bytes.toBytes("value"));
    ht.put(p);
  }

  TEST_UTIL.flush();

  List<Pair<byte[], byte[]>> data = new ArrayList<>();
  byte[] fuzzyKey = Bytes.toBytesBinary("\\x9B\\x00\\x044e");
  // All-zero mask: every one of the first 5 bytes must match exactly.
  byte[] mask = new byte[] { 0, 0, 0, 0, 0 };

  // copy the fuzzy key and mask to test HBASE-18617
  byte[] copyFuzzyKey = Arrays.copyOf(fuzzyKey, fuzzyKey.length);
  byte[] copyMask = Arrays.copyOf(mask, mask.length);

  data.add(new Pair<>(fuzzyKey, mask));
  FuzzyRowFilter filter = new FuzzyRowFilter(data);

  Scan scan = new Scan();
  scan.setFilter(filter);

  int total = 0;
  // try-with-resources guarantees the scanner is released (leaked in the original).
  try (ResultScanner scanner = ht.getScanner(scan)) {
    while (scanner.next() != null) {
      total++;
    }
  }
  // Exactly the two rows beginning with \x9B\x00\x044e match.
  assertEquals(2, total);

  // The filter must not have modified the arrays passed in (HBASE-18617).
  assertEquals(true, Arrays.equals(copyFuzzyKey, fuzzyKey));
  assertEquals(true, Arrays.equals(copyMask, mask));

  TEST_UTIL.deleteTable(TableName.valueOf(name.getMethodName()));
}
 
Example 14
Source File: RowKeyBytesStringFunctionIT.java    From phoenix with Apache License 2.0 4 votes vote down vote up
/**
 * Upserts five rows through Phoenix, then reads each row key back via
 * ROWKEY_BYTES_STRING() and verifies that a raw HBase Get with the decoded
 * key returns the expected "value" column for every row.
 */
@Test
public void getRowKeyBytesAndVerify() throws Exception {
    try (Connection conn = DriverManager.getConnection(getUrl())) {
        int[] values = {3,7,9,158,5};
        String tableName = generateUniqueName();
        // COLUMN_ENCODED_BYTES = 0 keeps column names unencoded so the raw
        // HBase read below can address the "value" qualifier directly.
        String ddl =
                "CREATE TABLE IF NOT EXISTS " + tableName + " "
                        + "(id INTEGER NOT NULL, pkcol VARCHAR, page_id UNSIGNED_LONG,"
                        + " \"DATE\" BIGINT, \"value\" INTEGER,"
                        + " constraint pk primary key(id, pkcol)) COLUMN_ENCODED_BYTES = 0";
        conn.createStatement().execute(ddl);

        // Rows are inserted out of id order (5 before 4); the SELECT below
        // iterates in row-key order, which matches the values[] indexing.
        conn.createStatement().execute("UPSERT INTO " + tableName
                + " (id, pkcol, page_id, \"DATE\", \"value\") VALUES (1, 'a', 8, 1," + values[0] + ")");
        conn.createStatement().execute("UPSERT INTO " + tableName
                + " (id, pkcol, page_id, \"DATE\", \"value\") VALUES (2, 'ab', 8, 2," + values[1] + ")");
        conn.createStatement().execute("UPSERT INTO " + tableName
                + " (id, pkcol, page_id, \"DATE\", \"value\") VALUES (3, 'abc', 8, 3," + values[2] + ")");
        conn.createStatement().execute("UPSERT INTO " + tableName
                + " (id, pkcol, page_id, \"DATE\", \"value\") VALUES (5, 'abcde', 8, 5," + values[4] + ")");
        conn.createStatement().execute("UPSERT INTO " + tableName
                + " (id, pkcol, page_id, \"DATE\", \"value\") VALUES (4, 'abcd', 8, 4," + values[3] + ")");
        conn.commit();

        // ROWKEY_BYTES_STRING() yields each row key in Bytes binary notation.
        ResultSet rs =
                conn.createStatement().executeQuery("SELECT ROWKEY_BYTES_STRING() FROM " + tableName);
        try (org.apache.hadoop.hbase.client.Connection hconn =
                ConnectionFactory.createConnection(config)) {
            Table table = hconn.getTable(TableName.valueOf(tableName));
            int i = 0;
            while (rs.next()) {
                String s = rs.getString(1);
                // Decode the stringified row key and fetch the same row via HBase.
                Get get = new Get(Bytes.toBytesBinary(s));
                Result hbaseRes = table.get(get);
                assertFalse(hbaseRes.isEmpty());
                assertTrue(Bytes.equals(hbaseRes.getValue(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("value")), 
                    PInteger.INSTANCE.toBytes(values[i])));
                i++;
            }
        }
    }
}
 
Example 15
Source File: HBase_1_1_2_ClientService.java    From nifi with Apache License 2.0 4 votes vote down vote up
/**
 * Decodes a string in HBase's escaped-binary notation (e.g. "\\x00") into
 * raw bytes by delegating to the HBase Bytes utility.
 *
 * @param s the encoded string
 * @return the decoded byte array
 */
@Override
public byte[] toBytesBinary(String s) {
    return Bytes.toBytesBinary(s);
}
 
Example 16
Source File: RegionSplitter.java    From phoenix-omid with Apache License 2.0 4 votes vote down vote up
/**
 * Sets the first row of the split range, decoding the user's input from
 * HBase's escaped-binary string notation.
 *
 * @param userInput first row key in Bytes binary notation
 */
@Override
public void setFirstRow(String userInput) {
    firstRowBytes = Bytes.toBytesBinary(userInput);
}
 
Example 17
Source File: MockHBaseClientService.java    From localization_nifi with Apache License 2.0 4 votes vote down vote up
/**
 * Mock pass-through: decodes an escaped-binary string (e.g. "\\x00") to raw
 * bytes using the real HBase Bytes utility.
 *
 * @param s the encoded string
 * @return the decoded byte array
 */
@Override
public byte[] toBytesBinary(String s) {
   return Bytes.toBytesBinary(s);
}
 
Example 18
Source File: TestKeyValue.java    From hbase with Apache License 2.0 4 votes vote down vote up
/**
 * Feeds deliberately malformed KeyValue byte buffers (with and without tags)
 * through createKeyValueFromInputStream and asserts each produces the exact
 * expected IllegalArgumentException message; a null expected message means
 * the buffer must parse successfully.
 */
@Test
public void testCheckKeyValueBytesFailureCase() throws Exception {
  // Malformed buffers without tags; case numbers match the outputs[] entries below.
  byte[][] inputs = new byte[][] { HConstants.EMPTY_BYTE_ARRAY, // case.0
    Bytes.toBytesBinary("a"), // case.1
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x01"), // case.2
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x01\\x00"), // case.3
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x01"), // case.4
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x01\\x00"), // case.5
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x01\\x00\\x01"), // case.6
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x01\\x00\\x03ROW"), // case.7
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01"), // case.8
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01FQ\\xFF"
        + "\\xFF\\xFF\\xFF\\xFF\\xFF\\xFF\\xFF\\x03"), // case.9
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01FQ\\x00"
        + "\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x03"), // case.10
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01FQ\\x00"
        + "\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x04"), // case.11
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01FQ\\x00"
        + "\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x04VALUE"), // case.12
  };
  // Expected exception message per inputs[] entry, positionally aligned.
  String[] outputs = new String[] { "Overflow when reading key length at position=0",
    "Overflow when reading key length at position=0",
    "Invalid key length in KeyValue. keyLength=1",
    "Overflow when reading value length at position=4",
    "Invalid value length in KeyValue, valueLength=1",
    "Overflow when reading row length at position=8",
    "Invalid row length in KeyValue, rowLength=1",
    "Overflow when reading family length at position=13",
    "Invalid family length in KeyValue, familyLength=1", "Timestamp cannot be negative, ts=-1",
    "Invalid type in KeyValue, type=3", "Overflow when reading value part at position=25",
    "Invalid tags length in KeyValue at position=26"};
  // Buffers exercising the tags section of a KeyValue.
  byte[][] withTagsInputs = new byte[][] {
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01FQ\\x00"
        + "\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x04V\\x01"), // case.13
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01FQ\\x00"
        + "\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x04V\\x00\\x01"), // case.14
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01FQ\\x00"
        + "\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x04V\\x00\\x04\\x00\\x03\\x00A"), // case.15
    // case.16
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01FQ\\x00"
        + "\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x04V\\x00\\x0A\\x00\\x04\\x00TAG\\x00\\x04"
        + "\\xFFT"),
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01FQ\\x00"
        + "\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x04V\\x00\\x0C\\x00\\x04\\x00TAG\\x00\\x05"
        + "\\xF0COME\\x00"), // case.17
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01FQ\\x00"
        + "\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x04V\\x00\\x0C\\x00\\x04\\x00TAG\\x00\\x05"
        + "\\xF0COME"), // case.18
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01FQ\\x00"
        + "\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x04V\\x00\\x00"), // case.19
    Bytes.toBytesBinary("\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x03ROW\\x01FQ\\x00"
        + "\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x04V\\x00\\x1B\\x00\\x05\\x01TAG1\\x00\\x05"
        + "\\x02TAG2\\x00\\x05\\x03TAG3\\x00\\x05\\x04TAG4"), // case.20
  };
  // null entries (cases 18-20) are valid buffers expected to parse cleanly.
  String[] withTagsOutputs = new String[] { "Overflow when reading tags length at position=26",
    "Invalid tags length in KeyValue at position=26",
    "Invalid tag length at position=28, tagLength=3",
    "Invalid tag length at position=34, tagLength=4",
    "Some redundant bytes in KeyValue's buffer, startOffset=41, endOffset=42", null, null,
    null,
  };
  assertEquals(inputs.length, outputs.length);
  assertEquals(withTagsInputs.length, withTagsOutputs.length);

  // Combine both suites into a single FailureCase array, tagging each with
  // whether the tags section should be parsed.
  FailureCase[] cases = new FailureCase[inputs.length + withTagsInputs.length];
  for (int i = 0; i < inputs.length; i++) {
    cases[i] = new FailureCase(inputs[i], 0, inputs[i].length, false, outputs[i]);
  }
  for (int i = 0; i < withTagsInputs.length; i++) {
    cases[inputs.length + i] =
        new FailureCase(withTagsInputs[i], 0, withTagsInputs[i].length, true, withTagsOutputs[i]);
  }

  for (int i = 0; i < cases.length; i++) {
    FailureCase c = cases[i];
    // Serialize as length-prefixed bytes, the wire form the parser expects.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream os = new DataOutputStream(baos);
    ByteBufferUtils.putInt(os, c.length);
    os.write(c.buf, c.offset, c.length);
    try {
      KeyValueUtil.createKeyValueFromInputStream(
        new DataInputStream(new ByteArrayInputStream(baos.toByteArray())), c.withTags);
      if (c.expectedMessage != null) {
        fail("Should fail when parse key value from an invalid bytes for case#" + i + ". " + c);
      }
    } catch (IllegalArgumentException e) {
      assertEquals("Case#" + i + " failed," + c, c.getExpectedMessage(), e.getMessage());
    }
  }
}
 
Example 19
Source File: HBaseStorage.java    From spork with Apache License 2.0 4 votes vote down vote up
/**
 * Builds the HBase Scan from the configured filter options (gt/lt/gte/lte,
 * regex, timestamps) and the configured column descriptors.
 *
 * <p>Fix over the original: the "lte" branch called {@code increment(lte_)}
 * twice; the second call is replaced by reusing the already-computed result.
 *
 * @throws IOException if the time range cannot be applied to the scan
 */
private void initScan() throws IOException{
    scan = new Scan();

    scan.setCacheBlocks(cacheBlocks_);
    scan.setCaching(caching_);

    // Set filters, if any.
    if (configuredOptions_.hasOption("gt")) {
        gt_ = Bytes.toBytesBinary(Utils.slashisize(configuredOptions_.getOptionValue("gt")));
        addRowFilter(CompareOp.GREATER, gt_);
        scan.setStartRow(gt_);
    }
    if (configuredOptions_.hasOption("lt")) {
        lt_ = Bytes.toBytesBinary(Utils.slashisize(configuredOptions_.getOptionValue("lt")));
        addRowFilter(CompareOp.LESS, lt_);
        scan.setStopRow(lt_);
    }
    if (configuredOptions_.hasOption("gte")) {
        gte_ = Bytes.toBytesBinary(Utils.slashisize(configuredOptions_.getOptionValue("gte")));
        scan.setStartRow(gte_);
    }
    if (configuredOptions_.hasOption("lte")) {
        lte_ = Bytes.toBytesBinary(Utils.slashisize(configuredOptions_.getOptionValue("lte")));
        // "lte" is inclusive, but stop rows are exclusive: increment the key once
        // and reuse the result (the original recomputed increment(lte_) below).
        byte[] lt = increment(lte_);
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("Incrementing lte value of %s from bytes %s to %s to set stop row",
                      Bytes.toString(lte_), toString(lte_), toString(lt)));
        }

        if (lt != null) {
            scan.setStopRow(lt);
        }

        // The WhileMatchFilter will short-circuit the scan after we no longer match. The
        // setStopRow call will limit the number of regions we need to scan
        addFilter(new WhileMatchFilter(new RowFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(lte_))));
    }
    if (configuredOptions_.hasOption("regex")) {
        regex_ = Utils.slashisize(configuredOptions_.getOptionValue("regex"));
        addFilter(new RowFilter(CompareOp.EQUAL, new RegexStringComparator(regex_)));
    }
    if (configuredOptions_.hasOption("minTimestamp") || configuredOptions_.hasOption("maxTimestamp")){
        scan.setTimeRange(minTimestamp_, maxTimestamp_);
    }
    if (configuredOptions_.hasOption("timestamp")){
        scan.setTimeStamp(timestamp_);
    }

    // if the group of columnInfos for this family doesn't contain a prefix, we don't need
    // to set any filters, we can just call addColumn or addFamily. See javadocs below.
    boolean columnPrefixExists = false;
    for (ColumnInfo columnInfo : columnInfo_) {
        if (columnInfo.getColumnPrefix() != null) {
            columnPrefixExists = true;
            break;
        }
    }

    if (!columnPrefixExists) {
        addFiltersWithoutColumnPrefix(columnInfo_);
    }
    else {
        addFiltersWithColumnPrefix(columnInfo_);
    }
}
 
Example 20
Source File: HBase_1_1_2_ClientService.java    From localization_nifi with Apache License 2.0 4 votes vote down vote up
/**
 * Decodes a string in HBase's escaped-binary notation (e.g. "\\x00") into
 * raw bytes by delegating to the HBase Bytes utility.
 *
 * @param s the encoded string
 * @return the decoded byte array
 */
@Override
public byte[] toBytesBinary(String s) {
    return Bytes.toBytesBinary(s);
}