org.apache.beam.sdk.extensions.sql.SqlTransform Java Examples

The following examples show how to use org.apache.beam.sdk.extensions.sql.SqlTransform. All of them are taken from the Apache Beam project; the source file each snippet comes from is noted above it.
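For orientation, here is a minimal, self-contained sketch of the pattern the examples share: build a schema-aware PCollection<Row>, apply SqlTransform.query(...), and consume the resulting rows. The wrapper class SqlTransformSketch and its two-row input are illustrative only; the API calls themselves (Schema.builder, Create.of(...).withRowSchema, SqlTransform.query, the implicit PCOLLECTION table) are the ones used in the examples below.

import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.extensions.sql.SqlTransform;
import org.apache.beam.sdk.schemas.Schema;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.Row;

public class SqlTransformSketch {
  public static void main(String[] args) {
    // Illustrative two-column schema matching the rows used throughout the tests below.
    Schema schema = Schema.builder().addInt32Field("id").addStringField("name").build();

    Pipeline pipeline = Pipeline.create();

    PCollection<Row> input =
        pipeline.apply(
            Create.of(
                    Row.withSchema(schema).addValues(1, "one").build(),
                    Row.withSchema(schema).addValues(2, "two").build())
                .withRowSchema(schema));

    // A single input PCollection is registered under the implicit table name PCOLLECTION.
    PCollection<Row> filtered =
        input.apply(SqlTransform.query("SELECT id, name FROM PCOLLECTION WHERE id = 1"));

    pipeline.run().waitUntilFinish();
  }
}

When a single PCollection is the input, it is queried as PCOLLECTION; the examples below additionally show withTableProvider/withDefaultTableProvider for resolving named tables and PCollectionTuple inputs for querying several tagged collections at once.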
Example #1
Source File: CustomTableResolverTest.java    From beam with Apache License 2.0
@Test
public void testLongCompoundIdInNonDefaultSchemaDifferentNames() throws Exception {
  CustomResolutionTestTableProvider tableProvider = new CustomResolutionTestTableProvider();
  tableProvider.createTable(
      Table.builder().name("testtable_blah_foo_bar").schema(BASIC_SCHEMA).type("test").build());
  tableProvider.addRows("testtable_blah_foo_bar", row(1, "one"), row(2, "two"));

  CustomResolutionTestTableProvider tableProvider2 = new CustomResolutionTestTableProvider();
  tableProvider2.createTable(
      Table.builder()
          .name("testtable2_blah2_foo2_bar2")
          .schema(BASIC_SCHEMA)
          .type("test")
          .build());
  tableProvider2.addRows("testtable2_blah2_foo2_bar2", row(3, "three"), row(4, "four"));

  PCollection<Row> result =
      pipeline.apply(
          SqlTransform.query("SELECT id, name FROM testprovider2.testtable2.blah2.foo2.bar2")
              .withTableProvider("testprovider2", tableProvider2)
              .withDefaultTableProvider("testprovider", tableProvider));

  PAssert.that(result).containsInAnyOrder(row(3, "three"), row(4, "four"));

  pipeline.run().waitUntilFinish(Duration.standardMinutes(2));
}
 
Example #2
Source File: CustomTableResolverTest.java    From beam with Apache License 2.0
@Test
public void testLongCompoundIdInNonDefaultSchemaSameTableNames() throws Exception {
  CustomResolutionTestTableProvider tableProvider = new CustomResolutionTestTableProvider();
  tableProvider.createTable(
      Table.builder().name("testtable_blah_foo_bar").schema(BASIC_SCHEMA).type("test").build());
  tableProvider.addRows("testtable_blah_foo_bar", row(1, "one"), row(2, "two"));

  CustomResolutionTestTableProvider tableProvider2 = new CustomResolutionTestTableProvider();
  tableProvider2.createTable(
      Table.builder().name("testtable_blah_foo_bar").schema(BASIC_SCHEMA).type("test").build());
  tableProvider2.addRows("testtable_blah_foo_bar", row(3, "three"), row(4, "four"));

  PCollection<Row> result =
      pipeline.apply(
          SqlTransform.query("SELECT id, name FROM testprovider2.testtable.blah.foo.bar")
              .withTableProvider("testprovider2", tableProvider2)
              .withDefaultTableProvider("testprovider", tableProvider));

  PAssert.that(result).containsInAnyOrder(row(3, "three"), row(4, "four"));

  pipeline.run().waitUntilFinish(Duration.standardMinutes(2));
}
 
Example #3
Source File: CustomTableResolverTest.java    From beam with Apache License 2.0
@Test
public void testLongCompoundIdInExplicitDefaultSchema() throws Exception {
  CustomResolutionTestTableProvider tableProvider = new CustomResolutionTestTableProvider();
  tableProvider.createTable(
      Table.builder().name("testtable_blah_foo_bar").schema(BASIC_SCHEMA).type("test").build());
  tableProvider.addRows("testtable_blah_foo_bar", row(1, "one"), row(2, "two"));

  PCollection<Row> result =
      pipeline.apply(
          SqlTransform.query("SELECT id, name FROM testprovider.testtable.blah.foo.bar")
              .withDefaultTableProvider("testprovider", tableProvider));

  PAssert.that(result).containsInAnyOrder(row(1, "one"), row(2, "two"));

  pipeline.run().waitUntilFinish(Duration.standardMinutes(2));
}
 
Example #4
Source File: CustomTableResolverTest.java    From beam with Apache License 2.0
@Test
public void testLongCompoundIdInDefaultSchemaWithMultipleProviders() throws Exception {
  CustomResolutionTestTableProvider tableProvider = new CustomResolutionTestTableProvider();
  tableProvider.createTable(
      Table.builder().name("testtable_blah_foo_bar").schema(BASIC_SCHEMA).type("test").build());
  tableProvider.addRows("testtable_blah_foo_bar", row(1, "one"), row(2, "two"));

  CustomResolutionTestTableProvider tableProvider2 = new CustomResolutionTestTableProvider();
  tableProvider2.createTable(
      Table.builder().name("testtable_blah_foo_bar").schema(BASIC_SCHEMA).type("test").build());
  tableProvider2.addRows("testtable_blah_foo_bar", row(3, "three"), row(4, "four"));

  PCollection<Row> result =
      pipeline.apply(
          SqlTransform.query("SELECT id, name FROM testtable.blah.foo.bar")
              .withTableProvider("testprovider2", tableProvider2)
              .withDefaultTableProvider("testprovider", tableProvider));

  PAssert.that(result).containsInAnyOrder(row(1, "one"), row(2, "two"));

  pipeline.run().waitUntilFinish(Duration.standardMinutes(2));
}
 
Example #5
Source File: CustomTableResolverTest.java    From beam with Apache License 2.0
@Test
public void testLongCompoundIdInDefaultSchema() throws Exception {
  CustomResolutionTestTableProvider tableProvider = new CustomResolutionTestTableProvider();
  tableProvider.createTable(
      Table.builder().name("testtable_blah_foo_bar").schema(BASIC_SCHEMA).type("test").build());
  tableProvider.addRows("testtable_blah_foo_bar", row(1, "one"), row(2, "two"));

  PCollection<Row> result =
      pipeline.apply(
          SqlTransform.query("SELECT id, name FROM testtable.blah.foo.bar")
              .withDefaultTableProvider("testprovider", tableProvider));

  PAssert.that(result).containsInAnyOrder(row(1, "one"), row(2, "two"));

  pipeline.run().waitUntilFinish(Duration.standardMinutes(2));
}
 
Example #6
Source File: CustomTableResolverTest.java    From beam with Apache License 2.0
@Test
public void testCompoundIdInExplicitDefaultSchema() throws Exception {
  CustomResolutionTestTableProvider tableProvider = new CustomResolutionTestTableProvider();
  tableProvider.createTable(
      Table.builder().name("testtable_blah").schema(BASIC_SCHEMA).type("test").build());
  tableProvider.addRows("testtable_blah", row(1, "one"), row(2, "two"));

  PCollection<Row> result =
      pipeline.apply(
          SqlTransform.query("SELECT id, name FROM testprovider.testtable.blah")
              .withDefaultTableProvider("testprovider", tableProvider));

  PAssert.that(result).containsInAnyOrder(row(1, "one"), row(2, "two"));

  pipeline.run().waitUntilFinish(Duration.standardMinutes(2));
}
 
Example #7
Source File: CustomTableResolverTest.java    From beam with Apache License 2.0
@Test
public void testCompoundIdInDefaultSchema() throws Exception {
  CustomResolutionTestTableProvider tableProvider = new CustomResolutionTestTableProvider();
  tableProvider.createTable(
      Table.builder().name("testtable_blah").schema(BASIC_SCHEMA).type("test").build());
  tableProvider.addRows("testtable_blah", row(1, "one"), row(2, "two"));

  PCollection<Row> result =
      pipeline.apply(
          SqlTransform.query("SELECT id, name FROM testtable.blah")
              .withDefaultTableProvider("testprovider", tableProvider));

  PAssert.that(result).containsInAnyOrder(row(1, "one"), row(2, "two"));

  pipeline.run().waitUntilFinish(Duration.standardMinutes(2));
}
 
Example #8
Source File: CustomTableResolverTest.java    From beam with Apache License 2.0
@Test
public void testSimpleIdWithExplicitNonDefaultSchema() throws Exception {
  CustomResolutionTestTableProvider tableProvider = new CustomResolutionTestTableProvider();
  tableProvider.createTable(
      Table.builder().name("testtable").schema(BASIC_SCHEMA).type("test").build());
  tableProvider.addRows("testtable", row(1, "one"), row(2, "two"));

  CustomResolutionTestTableProvider tableProvider2 = new CustomResolutionTestTableProvider();
  tableProvider2.createTable(
      Table.builder().name("testtable2").schema(BASIC_SCHEMA).type("test").build());
  tableProvider2.addRows("testtable2", row(3, "three"), row(4, "four"));

  PCollection<Row> result =
      pipeline.apply(
          SqlTransform.query("SELECT id, name FROM testprovider2.testtable2")
              .withTableProvider("testprovider2", tableProvider2)
              .withDefaultTableProvider("testprovider", tableProvider));

  PAssert.that(result).containsInAnyOrder(row(3, "three"), row(4, "four"));

  pipeline.run().waitUntilFinish(Duration.standardMinutes(2));
}
 
Example #9
Source File: CustomTableResolverTest.java    From beam with Apache License 2.0
@Test
public void testSimpleIdWithExplicitDefaultSchemaWithMultipleProviders() throws Exception {
  CustomResolutionTestTableProvider tableProvider = new CustomResolutionTestTableProvider();
  tableProvider.createTable(
      Table.builder().name("testtable").schema(BASIC_SCHEMA).type("test").build());
  tableProvider.addRows("testtable", row(1, "one"), row(2, "two"));

  CustomResolutionTestTableProvider tableProvider2 = new CustomResolutionTestTableProvider();
  tableProvider2.createTable(
      Table.builder().name("testtable2").schema(BASIC_SCHEMA).type("test").build());
  tableProvider2.addRows("testtable2", row(3, "three"), row(4, "four"));

  PCollection<Row> result =
      pipeline.apply(
          SqlTransform.query("SELECT id, name FROM testprovider2.testtable2")
              .withTableProvider("testprovider2", tableProvider2)
              .withDefaultTableProvider("testprovider", tableProvider));

  PAssert.that(result).containsInAnyOrder(row(3, "three"), row(4, "four"));

  pipeline.run().waitUntilFinish(Duration.standardMinutes(2));
}
 
Example #10
Source File: CustomTableResolverTest.java    From beam with Apache License 2.0
@Test
public void testSimpleIdWithExplicitDefaultSchema() throws Exception {
  CustomResolutionTestTableProvider tableProvider = new CustomResolutionTestTableProvider();
  tableProvider.createTable(
      Table.builder().name("testtable").schema(BASIC_SCHEMA).type("test").build());
  tableProvider.addRows("testtable", row(1, "one"), row(2, "two"));

  PCollection<Row> result =
      pipeline.apply(
          SqlTransform.query("SELECT id, name FROM testprovider.testtable")
              .withDefaultTableProvider("testprovider", tableProvider));

  PAssert.that(result).containsInAnyOrder(row(1, "one"), row(2, "two"));

  pipeline.run().waitUntilFinish(Duration.standardMinutes(2));
}
 
Example #11
Source File: CustomTableResolverTest.java    From beam with Apache License 2.0
@Test
public void testSimpleId() {
  CustomResolutionTestTableProvider tableProvider = new CustomResolutionTestTableProvider();
  tableProvider.createTable(
      Table.builder().name("testtable").schema(BASIC_SCHEMA).type("test").build());
  tableProvider.addRows("testtable", row(1, "one"), row(2, "two"));

  PCollection<Row> result =
      pipeline.apply(
          SqlTransform.query("SELECT id, name FROM testtable")
              .withDefaultTableProvider("testprovider", tableProvider));

  PAssert.that(result).containsInAnyOrder(row(1, "one"), row(2, "two"));

  pipeline.run().waitUntilFinish(Duration.standardMinutes(2));
}
 
Example #12
Source File: SqlQuery3.java    From beam with Apache License 2.0
@Override
public PCollection<NameCityStateId> expand(PCollection<Event> allEvents) {
  PCollection<Event> windowed =
      allEvents.apply(
          Window.into(FixedWindows.of(Duration.standardSeconds(configuration.windowSizeSec))));

  String auctionName = Auction.class.getSimpleName();
  PCollection<Row> auctions =
      windowed
          .apply(getName() + ".Filter." + auctionName, Filter.by(e1 -> e1.newAuction != null))
          .apply(getName() + ".ToRecords." + auctionName, new SelectEvent(Type.AUCTION));

  String personName = Person.class.getSimpleName();
  PCollection<Row> people =
      windowed
          .apply(getName() + ".Filter." + personName, Filter.by(e -> e.newPerson != null))
          .apply(getName() + ".ToRecords." + personName, new SelectEvent(Type.PERSON));

  PCollectionTuple inputStreams =
      PCollectionTuple.of(new TupleTag<>("Auction"), auctions)
          .and(new TupleTag<>("Person"), people);

  return inputStreams
      .apply(SqlTransform.query(QUERY).withQueryPlannerClass(plannerClass))
      .apply(Convert.fromRows(NameCityStateId.class));
}
 
Example #13
Source File: BeamSalUhfSpecialTypeAndValueTest.java    From beam with Apache License 2.0
@Test
public void testIsNan() throws Exception {
  Schema resultType =
      Schema.builder()
          .addBooleanField("field_1")
          .addBooleanField("field_2")
          .addBooleanField("field_3")
          .addBooleanField("field_4")
          .build();
  Row resultRow = Row.withSchema(resultType).addValues(false, false, true, true).build();

  String sql =
      "SELECT IS_NAN(f_float_2), IS_NAN(f_double_2), IS_NAN(f_float_3), IS_NAN(f_double_3) FROM PCOLLECTION";
  PCollection<Row> result = boundedInputFloatDouble.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result).containsInAnyOrder(resultRow);
  pipeline.run().waitUntilFinish();
}
 
Example #14
Source File: BeamSqlHiveSchemaTest.java    From beam with Apache License 2.0
@Test
public void testJoinPCollectionWithHCatalog() throws Exception {
  initializeHCatalog();

  PCollection<Row> inputMain =
      pipeline.apply("pcollection", create(row(1, "pcollection_1"), row(2, "pcollection_2")));

  PCollection<Row> result =
      inputMain.apply(
          SqlTransform.query(
                  "SELECT hive.f_int, (hive.f_str || ' ' || pcollection.f_string) AS f_string \n"
                      + "FROM `hive`.`default`.`mytable` AS hive \n"
                      + "   INNER JOIN \n"
                      + " PCOLLECTION AS pcollection \n"
                      + "   ON pcollection.f_int = hive.f_int")
              .withTableProvider("hive", hiveTableProvider()));

  PAssert.that(result)
      .containsInAnyOrder(row(1, "record 1 pcollection_1"), row(2, "record 2 pcollection_2"));
  pipeline.run();
}
 
Example #15
Source File: BeamSalUhfSpecialTypeAndValueTest.java    From beam with Apache License 2.0
@Test
public void testMd5() throws Exception {
  Schema resultType = Schema.builder().addByteArrayField("field").build();
  Row resultRow1 =
      Row.withSchema(resultType).addValues(DigestUtils.md5("foobar".getBytes(UTF_8))).build();
  Row resultRow2 =
      Row.withSchema(resultType).addValues(DigestUtils.md5(" ".getBytes(UTF_8))).build();
  Row resultRow3 =
      Row.withSchema(resultType)
          .addValues(DigestUtils.md5("abcABCжщфЖЩФ".getBytes(UTF_8)))
          .build();
  String sql = "SELECT MD5(f_bytes) FROM PCOLLECTION WHERE f_func = 'HashingFn'";
  PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result).containsInAnyOrder(resultRow1, resultRow2, resultRow3);
  pipeline.run().waitUntilFinish();
}
 
Example #16
Source File: BeamSalUhfSpecialTypeAndValueTest.java    From beam with Apache License 2.0
@Test
public void testSHA1() throws Exception {
  Schema resultType = Schema.builder().addByteArrayField("field").build();
  Row resultRow1 =
      Row.withSchema(resultType).addValues(DigestUtils.sha1("foobar".getBytes(UTF_8))).build();
  Row resultRow2 =
      Row.withSchema(resultType).addValues(DigestUtils.sha1(" ".getBytes(UTF_8))).build();
  Row resultRow3 =
      Row.withSchema(resultType)
          .addValues(DigestUtils.sha1("abcABCжщфЖЩФ".getBytes(UTF_8)))
          .build();
  String sql = "SELECT SHA1(f_bytes) FROM PCOLLECTION WHERE f_func = 'HashingFn'";
  PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result).containsInAnyOrder(resultRow1, resultRow2, resultRow3);
  pipeline.run().waitUntilFinish();
}
 
Example #17
Source File: BeamSalUhfSpecialTypeAndValueTest.java    From beam with Apache License 2.0
@Test
public void testSHA256() throws Exception {
  Schema resultType = Schema.builder().addByteArrayField("field").build();
  Row resultRow1 =
      Row.withSchema(resultType).addValues(DigestUtils.sha256("foobar".getBytes(UTF_8))).build();
  Row resultRow2 =
      Row.withSchema(resultType).addValues(DigestUtils.sha256(" ".getBytes(UTF_8))).build();
  Row resultRow3 =
      Row.withSchema(resultType)
          .addValues(DigestUtils.sha256("abcABCжщфЖЩФ".getBytes(UTF_8)))
          .build();
  String sql = "SELECT SHA256(f_bytes) FROM PCOLLECTION WHERE f_func = 'HashingFn'";
  PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result).containsInAnyOrder(resultRow1, resultRow2, resultRow3);
  pipeline.run().waitUntilFinish();
}
 
Example #18
Source File: BeamSalUhfSpecialTypeAndValueTest.java    From beam with Apache License 2.0
@Test
public void testSHA512() throws Exception {
  Schema resultType = Schema.builder().addByteArrayField("field").build();
  Row resultRow1 =
      Row.withSchema(resultType).addValues(DigestUtils.sha512("foobar".getBytes(UTF_8))).build();
  Row resultRow2 =
      Row.withSchema(resultType).addValues(DigestUtils.sha512(" ".getBytes(UTF_8))).build();
  Row resultRow3 =
      Row.withSchema(resultType)
          .addValues(DigestUtils.sha512("abcABCжщфЖЩФ".getBytes(UTF_8)))
          .build();
  String sql = "SELECT SHA512(f_bytes) FROM PCOLLECTION WHERE f_func = 'HashingFn'";
  PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result).containsInAnyOrder(resultRow1, resultRow2, resultRow3);
  pipeline.run().waitUntilFinish();
}
 
Example #19
Source File: BeamSalUhfSpecialTypeAndValueTest.java    From beam with Apache License 2.0
@Test
public void testIsInf() throws Exception {
  Schema resultType =
      Schema.builder()
          .addBooleanField("field_1")
          .addBooleanField("field_2")
          .addBooleanField("field_3")
          .addBooleanField("field_4")
          .build();
  Row resultRow = Row.withSchema(resultType).addValues(true, true, true, true).build();

  String sql =
      "SELECT IS_INF(f_float_1), IS_INF(f_double_1), IS_INF(f_float_2), IS_INF(f_double_2) FROM PCOLLECTION";
  PCollection<Row> result = boundedInputFloatDouble.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result).containsInAnyOrder(resultRow);
  pipeline.run().waitUntilFinish();
}
 
Example #20
Source File: BeamSqlBuiltinFunctionsIntegrationTestBase.java    From beam with Apache License 2.0
@Override
public PDone expand(PBegin begin) {
  PCollection<Boolean> result =
      begin
          .apply(Create.of(DUMMY_ROW).withRowSchema(DUMMY_SCHEMA))
          .apply(SqlTransform.query("SELECT " + expr))
          .apply(MapElements.into(TypeDescriptors.booleans()).via(row -> row.getBoolean(0)));

  PAssert.that(result)
      .satisfies(
          input -> {
            assertTrue("Test expression is false: " + expr, Iterables.getOnlyElement(input));
            return null;
          });
  return PDone.in(begin.getPipeline());
}
 
Example #21
Source File: BeamSqlDataCatalogExample.java    From beam with Apache License 2.0
public static void main(String[] args) throws Exception {
  LOG.info("Args: {}", Arrays.asList(args));
  DCExamplePipelineOptions options =
      PipelineOptionsFactory.fromArgs(args).as(DCExamplePipelineOptions.class);
  LOG.info("Query: {}\nOutput: {}", options.getQueryString(), options.getOutputFilePrefix());

  Pipeline pipeline = Pipeline.create(options);

  validateArgs(options);

  try (DataCatalogTableProvider tableProvider =
      DataCatalogTableProvider.create(options.as(DataCatalogPipelineOptions.class))) {
    pipeline
        .apply(
            "SQL Query",
            SqlTransform.query(options.getQueryString())
                .withDefaultTableProvider("datacatalog", tableProvider))
        .apply("Convert to Strings", rowsToStrings())
        .apply("Write output", TextIO.write().to(options.getOutputFilePrefix()));

    pipeline.run().waitUntilFinish();
  }
}
 
Example #22
Source File: ExternalSqlTransformRegistrar.java    From beam with Apache License 2.0
@Override
public PTransform<PInput, PCollection<Row>> buildExternal(Configuration configuration) {
  SqlTransform transform = SqlTransform.query(configuration.query);
  if (configuration.dialect != null) {
    Class<? extends QueryPlanner> queryPlanner =
        DIALECTS.get(configuration.dialect.toLowerCase());
    if (queryPlanner == null) {
      throw new IllegalArgumentException(
          String.format(
              "Received unknown SQL Dialect '%s'. Known dialects: %s",
              configuration.dialect, DIALECTS.keySet()));
    }
    transform = transform.withQueryPlannerClass(queryPlanner);
  }
  return transform;
}
 
Example #23
Source File: BeamSalUhfSpecialTypeAndValueTest.java    From beam with Apache License 2.0
@Test
public void testReverse() throws Exception {
  byte[] testBytes = "абвгд".getBytes(UTF_8);
  ArrayUtils.reverse(testBytes);
  Schema resultType = Schema.builder().addByteArrayField("field").build();
  Row resultRow = Row.withSchema(resultType).addValues(testBytes).build();
  Row resultRow2 = Row.withSchema(resultType).addValues("\1\0".getBytes(UTF_8)).build();
  Row resultRow3 = Row.withSchema(resultType).addValues("".getBytes(UTF_8)).build();
  String sql = "SELECT REVERSE(f_bytes) FROM PCOLLECTION WHERE f_func = 'LENGTH'";
  PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result).containsInAnyOrder(resultRow, resultRow2, resultRow3);
  pipeline.run().waitUntilFinish();
}
 
Example #24
Source File: BeamSqlBuiltinFunctionsIntegrationTestBase.java    From beam with Apache License 2.0
/** Build the corresponding SQL, compile it into a Beam pipeline, run it, and check the result. */
public void buildRunAndCheck(PCollection<Row> inputCollection) {

  for (ExpressionTestCase testCase : exps) {
    String expression = testCase.sqlExpr();
    Object expectedValue = testCase.expectedResult();
    String sql = String.format("SELECT %s FROM PCOLLECTION", expression);
    Schema schema;
    if (expectedValue == null) {
      schema =
          Schema.builder().addNullableField(expression, testCase.resultFieldType()).build();
    } else {
      schema = Schema.builder().addField(expression, testCase.resultFieldType()).build();
    }

    PCollection<Row> output =
        inputCollection.apply(testCase.toString(), SqlTransform.query(sql));

    // For floating-point numbers (Double and Float), a small precision delta is allowed;
    // other types use a regular equality check.
    if (expectedValue instanceof Double) {
      PAssert.that(output).satisfies(matchesScalar((double) expectedValue, PRECISION_DOUBLE));
    } else if (expectedValue instanceof Float) {
      PAssert.that(output).satisfies(matchesScalar((float) expectedValue, PRECISION_FLOAT));
    } else {
      PAssert.that(output)
          .containsInAnyOrder(
              TestUtils.RowsBuilder.of(schema).addRows(expectedValue).getRows());
    }
  }

  inputCollection.getPipeline().run();
}
 
Example #25
Source File: BeamSalUhfSpecialTypeAndValueTest.java    From beam with Apache License 2.0
@Test
public void testLength() throws Exception {
  Schema resultType = Schema.builder().addInt64Field("field").build();
  Row resultRow = Row.withSchema(resultType).addValues(10L).build();
  Row resultRow2 = Row.withSchema(resultType).addValues(0L).build();
  Row resultRow3 = Row.withSchema(resultType).addValues(2L).build();
  String sql = "SELECT LENGTH(f_bytes) FROM PCOLLECTION WHERE f_func = 'LENGTH'";
  PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result).containsInAnyOrder(resultRow, resultRow2, resultRow3);
  pipeline.run().waitUntilFinish();
}
 
Example #26
Source File: SqlQuery5.java    From beam with Apache License 2.0
public SqlQuery5(NexmarkConfiguration configuration) {
  super("SqlQuery5");

  String queryString =
      String.format(QUERY_TEMPLATE, configuration.windowPeriodSec, configuration.windowSizeSec);
  query = SqlTransform.query(queryString);
}
 
Example #27
Source File: BeamSalUhfSpecialTypeAndValueTest.java    From beam with Apache License 2.0
@Test
public void testToHex() throws Exception {
  Schema resultType = Schema.builder().addStringField("field").build();
  Row resultRow = Row.withSchema(resultType).addValue("666f6f626172").build();
  Row resultRow2 = Row.withSchema(resultType).addValue("20").build();
  Row resultRow3 = Row.withSchema(resultType).addValue("616263414243").build();
  Row resultRow4 =
      Row.withSchema(resultType).addValue("616263414243d0b6d189d184d096d0a9d0a4").build();

  String sql = "SELECT TO_HEX(f_bytes) FROM PCOLLECTION WHERE f_func = 'TO_HEX'";
  PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result).containsInAnyOrder(resultRow, resultRow2, resultRow3, resultRow4);
  pipeline.run().waitUntilFinish();
}
 
Example #28
Source File: BeamSalUhfSpecialTypeAndValueTest.java    From beam with Apache License 2.0
@Test
public void testLeftPad() throws Exception {
  Schema resultType = Schema.builder().addNullableField("field", FieldType.BYTES).build();
  Row resultRow = Row.withSchema(resultType).addValue("".getBytes(UTF_8)).build();
  Row resultRow2 = Row.withSchema(resultType).addValue("abcdef".getBytes(UTF_8)).build();
  Row resultRow3 = Row.withSchema(resultType).addValue("abcd".getBytes(UTF_8)).build();
  Row resultRow4 = Row.withSchema(resultType).addValue("defgabcdef".getBytes(UTF_8)).build();
  Row resultRow5 = Row.withSchema(resultType).addValue("defghdeabc".getBytes(UTF_8)).build();
  Row resultRow6 = Row.withSchema(resultType).addValue("----abc".getBytes(UTF_8)).build();
  Row resultRow7 = Row.withSchema(resultType).addValue("defdefd".getBytes(UTF_8)).build();
  Row resultRow8 = Row.withSchema(resultType).addValue(null).build();

  String sql = "SELECT LPAD(f_bytes_one, length, f_bytes_two) FROM PCOLLECTION";
  PCollection<Row> result =
      boundedInputBytesPaddingTest.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result)
      .containsInAnyOrder(
          resultRow,
          resultRow2,
          resultRow3,
          resultRow4,
          resultRow5,
          resultRow6,
          resultRow7,
          resultRow8);
  pipeline.run().waitUntilFinish();
}
 
Example #29
Source File: BeamSalUhfSpecialTypeAndValueTest.java    From beam with Apache License 2.0
@Test
public void testRightPad() throws Exception {
  Schema resultType = Schema.builder().addNullableField("field", FieldType.BYTES).build();
  Row resultRow = Row.withSchema(resultType).addValue("".getBytes(UTF_8)).build();
  Row resultRow2 = Row.withSchema(resultType).addValue("abcdef".getBytes(UTF_8)).build();
  Row resultRow3 = Row.withSchema(resultType).addValue("abcd".getBytes(UTF_8)).build();
  Row resultRow4 = Row.withSchema(resultType).addValue("abcdefdefg".getBytes(UTF_8)).build();
  Row resultRow5 = Row.withSchema(resultType).addValue("abcdefghde".getBytes(UTF_8)).build();
  Row resultRow6 = Row.withSchema(resultType).addValue("abc----".getBytes(UTF_8)).build();
  Row resultRow7 = Row.withSchema(resultType).addValue("defdefd".getBytes(UTF_8)).build();
  Row resultRow8 = Row.withSchema(resultType).addValue(null).build();

  String sql = "SELECT RPAD(f_bytes_one, length, f_bytes_two) FROM PCOLLECTION";
  PCollection<Row> result =
      boundedInputBytesPaddingTest.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result)
      .containsInAnyOrder(
          resultRow,
          resultRow2,
          resultRow3,
          resultRow4,
          resultRow5,
          resultRow6,
          resultRow7,
          resultRow8);
  pipeline.run().waitUntilFinish();
}
 
Example #30
Source File: SqlQuery2.java    From beam with Apache License 2.0
@Override
public PCollection<AuctionPrice> expand(PCollection<Event> allEvents) {
  return allEvents
      .apply(Filter.by(NexmarkQueryUtil.IS_BID))
      .apply(getName() + ".SelectEvent", new SelectEvent(Type.BID))
      .apply(
          SqlTransform.query(String.format(QUERY_TEMPLATE, skipFactor))
              .withQueryPlannerClass(plannerClass))
      .apply(Convert.fromRows(AuctionPrice.class));
}