Java Code Examples for org.apache.parquet.filter2.predicate.FilterApi#eq()

The following examples show how to use org.apache.parquet.filter2.predicate.FilterApi#eq(). They are drawn from open-source projects; the originating project and source file are noted above each example.
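Before the project-specific examples, here is a minimal, self-contained sketch of the basic usage pattern: build a column reference, create an equality predicate with FilterApi.eq(), wrap it with FilterCompat, and hand it to a ParquetReader. The file path (/tmp/data.parquet), the column name (id), and the use of GroupReadSupport are illustrative assumptions, not taken from the examples below.

import org.apache.hadoop.fs.Path;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.filter2.compat.FilterCompat;
import org.apache.parquet.filter2.predicate.FilterApi;
import org.apache.parquet.filter2.predicate.FilterPredicate;
import org.apache.parquet.filter2.predicate.Operators.LongColumn;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.example.GroupReadSupport;

public class FilterApiEqExample {
  public static void main(String[] args) throws Exception {
    // Column reference for an INT64 field named "id" (illustrative name).
    LongColumn id = FilterApi.longColumn("id");

    // Keep only records where id == 42; passing null instead of 42L would match null values.
    FilterPredicate onlyId42 = FilterApi.eq(id, 42L);

    // Wrap the predicate with FilterCompat so the reader applies it during the scan.
    try (ParquetReader<Group> reader = ParquetReader
        .builder(new GroupReadSupport(), new Path("/tmp/data.parquet")) // illustrative path
        .withFilter(FilterCompat.get(onlyId42))
        .build()) {
      for (Group record = reader.read(); record != null; record = reader.read()) {
        System.out.println(record);
      }
    }
  }
}

Passing null as the value to eq()/notEq() matches null values; this is how Example 1 below implements the IS_NULL and NOT_NULL operations.
 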
Example 1
Source File: ParquetFilters.java    From iceberg with Apache License 2.0
@SuppressWarnings("checkstyle:MethodTypeParameterName")
private static <C extends Comparable<C>, COL extends Operators.Column<C> & Operators.SupportsLtGt>
    FilterPredicate pred(Operation op, COL col, C value) {
  switch (op) {
    case IS_NULL:
      return FilterApi.eq(col, null);
    case NOT_NULL:
      return FilterApi.notEq(col, null);
    case EQ:
      return FilterApi.eq(col, value);
    case NOT_EQ:
      return FilterApi.notEq(col, value);
    case GT:
      return FilterApi.gt(col, value);
    case GT_EQ:
      return FilterApi.gtEq(col, value);
    case LT:
      return FilterApi.lt(col, value);
    case LT_EQ:
      return FilterApi.ltEq(col, value);
    default:
      throw new UnsupportedOperationException("Unsupported predicate operation: " + op);
  }
}
 
Example 2
Source File: ParquetFilters.java    From iceberg with Apache License 2.0
private static
<C extends Comparable<C>, COL extends Operators.Column<C> & Operators.SupportsLtGt>
FilterPredicate pred(Operation op, COL col, C value) {
  switch (op) {
    case IS_NULL:
      return FilterApi.eq(col, null);
    case NOT_NULL:
      return FilterApi.notEq(col, null);
    case EQ:
      return FilterApi.eq(col, value);
    case NOT_EQ:
      return FilterApi.notEq(col, value);
    case GT:
      return FilterApi.gt(col, value);
    case GT_EQ:
      return FilterApi.gtEq(col, value);
    case LT:
      return FilterApi.lt(col, value);
    case LT_EQ:
      return FilterApi.ltEq(col, value);
    default:
      throw new UnsupportedOperationException("Unsupported predicate operation: " + op);
  }
}
 
Example 3
Source File: FilteringBenchmarks.java    From parquet-mr with Apache License 2.0
private void benchmark(Blackhole blackhole, BaseContext context) throws Exception {
  FilterPredicate filter = FilterApi.eq(BaseContext.COLUMN, context.getRandom().nextLong());
  try (ParquetReader<Group> reader = context.createReaderBuilder()
      .withFilter(FilterCompat.get(filter))
      .build()) {
    blackhole.consume(reader.read());
  }
}
 
Example 4
Source File: ParquetTableSourceTest.java    From flink with Apache License 2.0
@Test
public void testFieldsFilter() throws Exception {
	ParquetTableSource parquetTableSource = createNestedTestParquetTableSource(testPath);

	// expressions for supported predicates
	Expression exp1 = new GreaterThan(
		new PlannerResolvedFieldReference("foo", Types.LONG),
		new Literal(100L, Types.LONG));
	Expression exp2 = new EqualTo(
		new Literal(100L, Types.LONG),
		new PlannerResolvedFieldReference("bar.spam", Types.LONG));

	// unsupported predicate
	Expression unsupported = new EqualTo(
		new GetCompositeField(
			new ItemAt(
				new PlannerResolvedFieldReference(
					"nestedArray",
					ObjectArrayTypeInfo.getInfoFor(
						Types.ROW_NAMED(new String[] {"type", "name"}, Types.STRING, Types.STRING))),
					new Literal(1, Types.INT)),
					"type"),
		new Literal("test", Types.STRING));
	// invalid predicate
	Expression invalidPred = new EqualTo(
		new PlannerResolvedFieldReference("nonField", Types.LONG),
		// an invalid, non-serializable literal (here an object of this test class)
		new Literal(new ParquetTableSourceTest(), Types.LONG)
	);

	List<Expression> exps = new ArrayList<>();
	exps.add(exp1);
	exps.add(exp2);
	exps.add(unsupported);
	exps.add(invalidPred);

	// apply predicates to the TableSource
	ParquetTableSource filtered = (ParquetTableSource) parquetTableSource.applyPredicate(exps);

	// ensure copy is returned
	assertNotSame(parquetTableSource, filtered);

	// ensure table schema is identical
	assertEquals(parquetTableSource.getTableSchema(), filtered.getTableSchema());

	// ensure return type is identical
	assertEquals(NESTED_ROW_TYPE, filtered.getReturnType());

	// ensure source description is not the same
	assertNotEquals(parquetTableSource.explainSource(), filtered.explainSource());

	// check that pushdown was recorded
	assertTrue(filtered.isFilterPushedDown());
	assertFalse(parquetTableSource.isFilterPushedDown());

	// ensure that supported predicates were removed from list of offered expressions
	assertEquals(2, exps.size());
	assertTrue(exps.contains(unsupported));
	assertTrue(exps.contains(invalidPred));

	// ensure ParquetInputFormat is correctly configured with filter
	DataSet<Row> data = filtered.getDataSet(ExecutionEnvironment.createLocalEnvironment());
	InputFormat<Row, ?> inputFormat = ((DataSource<Row>) data).getInputFormat();
	assertTrue(inputFormat instanceof ParquetRowInputFormat);
	ParquetRowInputFormat parquetIF = (ParquetRowInputFormat) inputFormat;

	// expected predicate
	FilterPredicate a = FilterApi.gt(FilterApi.longColumn("foo"), 100L);
	FilterPredicate b = FilterApi.eq(FilterApi.longColumn("bar.spam"), 100L);
	FilterPredicate expected = FilterApi.and(a, b);
	// actual predicate
	FilterPredicate predicate = parquetIF.getPredicate();
	// check predicate
	assertEquals(expected, predicate);
}
 
Example 5
Source File: ParquetFilters.java    From iceberg with Apache License 2.0
@Override
public <T> FilterPredicate predicate(BoundPredicate<T> pred) {
  if (!(pred.term() instanceof BoundReference)) {
    throw new UnsupportedOperationException("Cannot convert non-reference to Parquet filter: " + pred.term());
  }

  Operation op = pred.op();
  BoundReference<T> ref = (BoundReference<T>) pred.term();
  String path = schema.idToAlias(ref.fieldId());
  Literal<T> lit;
  if (pred.isUnaryPredicate()) {
    lit = null;
  } else if (pred.isLiteralPredicate()) {
    lit = pred.asLiteralPredicate().literal();
  } else {
    throw new UnsupportedOperationException("Cannot convert to Parquet filter: " + pred);
  }

  switch (ref.type().typeId()) {
    case BOOLEAN:
      Operators.BooleanColumn col = FilterApi.booleanColumn(path);
      switch (op) {
        case EQ:
          return FilterApi.eq(col, getParquetPrimitive(lit));
        case NOT_EQ:
          return FilterApi.notEq(col, getParquetPrimitive(lit));
      }
      break;
    case INTEGER:
    case DATE:
      return pred(op, FilterApi.intColumn(path), getParquetPrimitive(lit));
    case LONG:
    case TIME:
    case TIMESTAMP:
      return pred(op, FilterApi.longColumn(path), getParquetPrimitive(lit));
    case FLOAT:
      return pred(op, FilterApi.floatColumn(path), getParquetPrimitive(lit));
    case DOUBLE:
      return pred(op, FilterApi.doubleColumn(path), getParquetPrimitive(lit));
    case STRING:
    case UUID:
    case FIXED:
    case BINARY:
    case DECIMAL:
      return pred(op, FilterApi.binaryColumn(path), getParquetPrimitive(lit));
  }

  throw new UnsupportedOperationException("Cannot convert to Parquet filter: " + pred);
}
 
Example 6
Source File: ParquetFilters.java    From iceberg with Apache License 2.0
@Override
public <T> FilterPredicate predicate(BoundPredicate<T> pred) {
  Operation op = pred.op();
  BoundReference<T> ref = pred.ref();
  Literal<T> lit = pred.literal();
  String path = schema.idToAlias(ref.fieldId());

  switch (ref.type().typeId()) {
    case BOOLEAN:
      Operators.BooleanColumn col = FilterApi.booleanColumn(path);
      switch (op) {
        case EQ:
          return FilterApi.eq(col, getParquetPrimitive(lit));
        case NOT_EQ:
          return FilterApi.notEq(col, getParquetPrimitive(lit));
      }
      break;

    case INTEGER:
      return pred(op, FilterApi.intColumn(path), getParquetPrimitive(lit));
    case LONG:
      return pred(op, FilterApi.longColumn(path), getParquetPrimitive(lit));
    case FLOAT:
      return pred(op, FilterApi.floatColumn(path), getParquetPrimitive(lit));
    case DOUBLE:
      return pred(op, FilterApi.doubleColumn(path), getParquetPrimitive(lit));
    case DATE:
      return pred(op, FilterApi.intColumn(path), getParquetPrimitive(lit));
    case TIME:
      return pred(op, FilterApi.longColumn(path), getParquetPrimitive(lit));
    case TIMESTAMP:
      return pred(op, FilterApi.longColumn(path), getParquetPrimitive(lit));
    case STRING:
      return pred(op, FilterApi.binaryColumn(path), getParquetPrimitive(lit));
    case UUID:
      return pred(op, FilterApi.binaryColumn(path), getParquetPrimitive(lit));
    case FIXED:
      return pred(op, FilterApi.binaryColumn(path), getParquetPrimitive(lit));
    case BINARY:
      return pred(op, FilterApi.binaryColumn(path), getParquetPrimitive(lit));
    case DECIMAL:
      return pred(op, FilterApi.binaryColumn(path), getParquetPrimitive(lit));
  }

  throw new UnsupportedOperationException("Cannot convert to Parquet filter: " + pred);
}
 