org.apache.avro.generic.GenericData.Record Java Examples

The following examples show how to use org.apache.avro.generic.GenericData.Record. They are drawn from several open-source projects; the source file, project, and license for each example are noted in its header.
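
To ground the examples, here is a minimal, self-contained sketch of building a GenericData.Record directly against a schema; the record name, fields, and values are illustrative and not taken from any of the projects below.

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;

public class GenericRecordSketch {
    public static void main(String[] args) {
        // Build a simple two-field schema programmatically
        Schema schema = SchemaBuilder.record("User").fields()
                .requiredLong("id")
                .optionalString("email")
                .endRecord();

        // GenericData.Record is a schema-backed field container addressed by name or position
        GenericData.Record record = new GenericData.Record(schema);
        record.put("id", 42L);
        record.put("email", "user@example.com");

        System.out.println(record);           // {"id": 42, "email": "user@example.com"}
        System.out.println(record.get("id")); // 42
    }
}

GenericRecordBuilder, which appears throughout the tests below, wraps this same container and additionally validates required fields and fills in schema defaults at build() time.
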
Example #1
Source File: TestAvroTypeUtil.java    From nifi with Apache License 2.0
/**
 * The issue consists of having an Avro schema with a default value in an
 * array. See
 * <a href="https://issues.apache.org/jira/browse/NIFI-4893">NIFI-4893</a>.
 * @throws IOException
 *             if the schema resource cannot be read.
 */
@Test
public void testDefaultArrayValuesInRecordsCase2() throws IOException {
    Schema avroSchema = new Schema.Parser().parse(getClass().getResourceAsStream("defaultArrayInRecords2.json"));
    GenericRecordBuilder builder = new GenericRecordBuilder(avroSchema);
    Record field1Record = new GenericRecordBuilder(avroSchema.getField("field1").schema()).build();
    builder.set("field1", field1Record);
    Record r = builder.build();

    @SuppressWarnings("unchecked")
    GenericData.Array<Integer> values = (GenericData.Array<Integer>) ((GenericRecord) r.get("field1"))
            .get("listOfInt");
    assertArrayEquals(new Object[] {1,2,3}, values.toArray());
    RecordSchema record = AvroTypeUtil.createSchema(avroSchema);
    RecordField field = record.getField("field1").get();
    assertEquals(RecordFieldType.RECORD, field.getDataType().getFieldType());
    RecordDataType data = (RecordDataType) field.getDataType();
    RecordSchema childSchema = data.getChildSchema();
    RecordField childField = childSchema.getField("listOfInt").get();
    assertEquals(RecordFieldType.ARRAY, childField.getDataType().getFieldType());
    assertTrue(childField.getDefaultValue() instanceof Object[]);
    assertArrayEquals(new Object[] {1,2,3}, ((Object[]) childField.getDefaultValue()));
}
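
Example #1 reads its schema from an external resource (defaultArrayInRecords2.json) that is not reproduced here. As a self-contained reference, here is a minimal sketch of how GenericRecordBuilder materialises an array default declared in the schema; the inline schema below is an illustrative stand-in, not the actual resource.

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecordBuilder;

public class ArrayDefaultSketch {
    public static void main(String[] args) {
        // Illustrative record schema with an int-array field that declares a default value
        String schemaJson =
                "{\"type\":\"record\",\"name\":\"Inner\",\"fields\":["
                + "{\"name\":\"listOfInt\",\"type\":{\"type\":\"array\",\"items\":\"int\"},"
                + "\"default\":[1,2,3]}]}";
        Schema schema = new Schema.Parser().parse(schemaJson);

        // Fields never set on the builder are populated from their schema defaults at build() time
        GenericData.Record record = new GenericRecordBuilder(schema).build();
        System.out.println(record.get("listOfInt")); // [1, 2, 3]
    }
}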
 
Example #2
Source File: MarketoOpportunityClientTest.java    From components with Apache License 2.0
@Test
public void testDeleteOpportunities() throws Exception {
    oprops.customObjectDeleteBy.setValue(CustomObjectDeleteBy.idField);
    //
    doThrow(new MarketoException("REST", "error")).when(client).executePostRequest(eq(SyncResult.class),
            any(JsonObject.class));
    List<IndexedRecord> records = new ArrayList<>();
    IndexedRecord record = new Record(MarketoConstants.getCustomObjectRecordSchema());
    record.put(0, "mkto-123456");
    records.add(record);
    mktoSR = client.deleteOpportunities(oprops, records);
    assertFalse(mktoSR.isSuccess());
    assertFalse(mktoSR.getErrorsString().isEmpty());
    //
    doReturn(new SyncResult()).when(client).executePostRequest(eq(SyncResult.class), any(JsonObject.class));
    mktoSR = client.deleteOpportunities(oprops, records);
    assertFalse(mktoSR.isSuccess());
    //
    doReturn(getListOperationResult(true, "deleted")).when(client).executePostRequest(eq(SyncResult.class),
            any(JsonObject.class));
    mktoSR = client.deleteOpportunities(oprops, records);
    assertTrue(mktoSR.isSuccess());
    assertTrue(mktoSR.getErrorsString().isEmpty());

}
 
Example #3
Source File: ConsumerRecordWriterTest.java    From data-highway with Apache License 2.0
@Test
public void write_Flush() throws IOException {
  when(outputStreamFactory.create(LOCATION)).thenReturn(abortableOutputStream);
  ArgumentCaptor<OutputStream> captor = ArgumentCaptor.forClass(OutputStream.class);
  when(recordWriterFactory.create(eq(schema1), captor.capture())).thenReturn(recordWriter);

  underTest.getByteCounter().getAndAdd(3L); // fake some written bytes
  ConsumerRecord<Void, Record> record = record(schema1, "foo", 1, 10);
  underTest.write(record);

  verify(recordWriter).write(record.value());
  assertThat(underTest.getRecordCounter().get(), is(0L));
  verify(metrics).consumedBytes(10);
  verify(metrics).offsetHighwaterMark(0, 1);
  verify(metrics).uploadedBytes(3L);
  verify(metrics).uploadedEvents(1L);
  assertThat(writers.size(), is(0));
}
 
Example #4
Source File: TestKiteStorageProcessor.java    From localization_nifi with Apache License 2.0
@Test
public void testIncompatibleSchema() throws IOException {
    Schema incompatible = SchemaBuilder.record("User").fields()
            .requiredLong("id")
            .requiredString("username")
            .optionalString("email") // the dataset requires this field
            .endRecord();

    // this user has the email field and could be stored, but the schema is
    // still incompatible so the entire stream is rejected
    Record incompatibleUser = new Record(incompatible);
    incompatibleUser.put("id", 1L);
    incompatibleUser.put("username", "a");
    incompatibleUser.put("email", "[email protected]");

    TestRunner runner = TestRunners.newTestRunner(StoreInKiteDataset.class);
    runner.setProperty(StoreInKiteDataset.KITE_DATASET_URI, datasetUri);
    runner.assertValid();

    runner.enqueue(streamFor(incompatibleUser));
    runner.run();

    runner.assertAllFlowFilesTransferred("incompatible", 1);
}
 
Example #5
Source File: MarketoListOperationWriterTest.java    From components with Apache License 2.0
@Test
public void testRetryOperationFailDieOnError() throws Exception {
    IndexedRecord record = new Record(MarketoConstants.getListOperationRESTSchema());
    record.put(0, 12345);
    record.put(1, 54321);
    doReturn(false).when(client).isErrorRecoverable(any(List.class));
    doReturn(getFailedSyncResult("REST", "902", "Invalid operation")).when(client)
            .addToList(any(ListOperationParameters.class));
    writer.open("test");
    writer.write(record);
    try {
        writer.close();
        fail("Should not be here");
    } catch (Exception e) {
        assertTrue(e.getMessage().contains("902"));
    }
}
 
Example #6
Source File: TruckParkTest.java    From data-highway with Apache License 2.0
@Test
public void test() throws Exception {
  Schema schema = SchemaBuilder.record("r").fields().name("f").type().stringType().noDefault().endRecord();
  Record value = new Record(schema);

  ConsumerRecord<Void, Record> record1 = new ConsumerRecord<>(topic, 0, 1, null, value);
  ConsumerRecords<Void, Record> records1 = new ConsumerRecords<>(
      ImmutableMap.of(partition, ImmutableList.of(record1)));

  when(consumer.poll(pollTimeout)).thenReturn(records1);

  underTest.run(null);

  InOrder inOrder = inOrder(consumer, writer, context);
  inOrder.verify(consumer).assign(partitions);
  inOrder.verify(consumer).seek(partition, 1L);
  inOrder.verify(consumer).poll(pollTimeout);
  inOrder.verify(writer).write(record1);
  inOrder.verify(consumer).pause(partitions);
  inOrder.verify(writer).close();
  inOrder.verify(context).close();
}
 
Example #7
Source File: RandomData.java    From iceberg with Apache License 2.0
private static Iterable<Record> newIterable(Supplier<RandomDataGenerator> newGenerator,
                                            Schema schema, int numRecords) {
  return () -> new Iterator<Record>() {
    private int count = 0;
    private RandomDataGenerator generator = newGenerator.get();

    @Override
    public boolean hasNext() {
      return count < numRecords;
    }

    @Override
    public Record next() {
      if (count >= numRecords) {
        throw new NoSuchElementException();
      }
      count += 1;
      return (Record) TypeUtil.visit(schema, generator);
    }
  };
}
 
Example #8
Source File: TestReadProjection.java    From iceberg with Apache License 2.0
@Test
public void testReorderedProjection() throws Exception {
  Schema schema = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get()),
      Types.NestedField.optional(1, "data", Types.StringType.get())
  );

  Record record = new Record(AvroSchemaUtil.convert(schema, "table"));
  record.put("id", 34L);
  record.put("data", "test");

  Schema reordered = new Schema(
      Types.NestedField.optional(2, "missing_1", Types.StringType.get()),
      Types.NestedField.optional(1, "data", Types.StringType.get()),
      Types.NestedField.optional(3, "missing_2", Types.LongType.get())
  );

  Record projected = writeAndRead("full_projection", schema, reordered, record);

  Assert.assertNull("Should contain the correct 0 value", projected.get(0));
  Assert.assertEquals("Should contain the correct 1 value", "test", projected.get(1).toString());
  Assert.assertNull("Should contain the correct 2 value", projected.get(2));
}
 
Example #9
Source File: MarketoSOAPClientTest.java    From components with Apache License 2.0
@Test
public void testSyncMultipleLeads() throws Exception {
    doReturn(getSyncMultipleLeadsResult()).when(port).syncMultipleLeads(any(ParamsSyncMultipleLeads.class),
            any(AuthenticationHeader.class));
    oprops.afterOutputOperation();
    oprops.beforeMappingInput();
    mktoSR = client.syncLead(oprops, null);
    assertNotNull(mktoSR);
    assertFalse(mktoSR.isSuccess());
    IndexedRecord record = new Record(MarketoConstants.getSOAPOutputSchemaForSyncLead());
    record.put(0, 12345);
    record.put(1, "[email protected]");
    mktoSR = client.syncMultipleLeads(oprops, Arrays.asList(record));
    assertNotNull(mktoSR);
    assertTrue(mktoSR.isSuccess());
    //
    doThrow(new RuntimeException("error")).when(port).syncMultipleLeads(any(ParamsSyncMultipleLeads.class),
            any(AuthenticationHeader.class));
    mktoSR = client.syncMultipleLeads(oprops, Arrays.asList(record));
    assertNotNull(mktoSR);
    assertFalse(mktoSR.isSuccess());
}
 
Example #10
Source File: TestFileSystemDataset.java    From kite with Apache License 2.0
@Test(expected = ValidationException.class)
public void testCannotMergeDatasetsWithDifferentFormats() throws IOException {
  FileSystemDataset<Record> ds = new FileSystemDataset.Builder<Record>()
      .namespace("ns")
      .name("users")
      .configuration(getConfiguration())
      .descriptor(new DatasetDescriptor.Builder()
          .schema(USER_SCHEMA)
          .format(Formats.AVRO)
          .location(testDirectory)
          .build())
      .type(Record.class)
      .build();
  FileSystemDataset<Record> dsUpdate = new FileSystemDataset.Builder<Record>()
      .namespace("ns")
      .name("users")
      .configuration(getConfiguration())
      .descriptor(new DatasetDescriptor.Builder()
          .schema(USER_SCHEMA)
          .format(Formats.PARQUET)
          .location(testDirectory)
          .build())
      .type(Record.class)
      .build();
  ds.merge(dsUpdate);
}
 
Example #11
Source File: MarketoCompanyClientTest.java    From components with Apache License 2.0
@Test
public void testDeleteCompany() throws Exception {
    oprops.customObjectDeleteBy.setValue(CustomObjectDeleteBy.idField);
    //
    doThrow(new MarketoException("REST", "error")).when(client).executePostRequest(eq(SyncResult.class),
            any(JsonObject.class));
    List<IndexedRecord> records = new ArrayList<>();
    IndexedRecord record = new Record(MarketoConstants.getCustomObjectRecordSchema());
    record.put(0, "mkto-123456");
    records.add(record);
    mktoSR = client.deleteCompany(oprops, records);
    assertFalse(mktoSR.isSuccess());
    assertFalse(mktoSR.getErrorsString().isEmpty());
    //
    doReturn(new SyncResult()).when(client).executePostRequest(eq(SyncResult.class), any(JsonObject.class));
    mktoSR = client.deleteCompany(oprops, records);
    assertFalse(mktoSR.isSuccess());
    //
    doReturn(getListOperationResult(true, "deleted")).when(client).executePostRequest(eq(SyncResult.class),
            any(JsonObject.class));
    mktoSR = client.deleteCompany(oprops, records);
    assertTrue(mktoSR.isSuccess());
    assertTrue(mktoSR.getErrorsString().isEmpty());
}
 
Example #12
Source File: TestMapReduce.java    From kite with Apache License 2.0
@Test
@SuppressWarnings("deprecation")
public void testSignalReadyOutputView() throws Exception {
  Assume.assumeTrue(!Hadoop.isHadoop1());
  populateInputDataset();
  populateOutputDataset(); // existing output will be overwritten

  Job job = new Job();
  DatasetKeyInputFormat.configure(job).readFrom(inputDataset).withType(GenericData.Record.class);

  job.setMapperClass(LineCountMapper.class);
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(IntWritable.class);

  job.setReducerClass(GenericStatsReducer.class);

  View<Record> outputView = outputDataset.with("name", "apple", "banana", "carrot");
  DatasetKeyOutputFormat.configure(job).appendTo(outputView).withType(GenericData.Record.class);

  Assert.assertTrue(job.waitForCompletion(true));

  Assert.assertFalse("Output dataset should not be signaled ready",
      ((Signalable)outputDataset).isReady());
  Assert.assertTrue("Output view should be signaled ready",
      ((Signalable)outputView).isReady());
}
 
Example #13
Source File: MarketoListOperationWriterTest.java    From components with Apache License 2.0
@Test
public void testWriteSOAP() throws Exception {
    doReturn(getSuccessSyncResult("added")).when(client).addToList(any(ListOperationParameters.class));
    IndexedRecord record = new Record(MarketoConstants.getListOperationSOAPSchema());
    record.put(0, "MKTOLISTNAME");
    record.put(1, "TESTS");
    record.put(2, "ID");
    record.put(3, "12345");
    props.connection.apiMode.setValue(APIMode.SOAP);
    props.dieOnError.setValue(false);
    props.multipleOperation.setValue(true);
    props.schemaInput.schema.setValue(MarketoConstants.getListOperationSOAPSchema());
    props.updateOutputSchemas();
    when(client.getApi()).thenReturn("SOAP");
    when(sink.getProperties()).thenReturn(props);
    writer.open("test");
    writer.write(record);
    assertEquals(1, writer.getSuccessfulWrites().size());
    record.put(1, "TEST2");
    writer.write(record);
    assertNotNull(writer.close());
    assertEquals(1, writer.getSuccessfulWrites().size());
    //
}
 
Example #14
Source File: TestAvroRecordConverter.java    From nifi with Apache License 2.0
/**
 * Tests the case where we want to default map one field and explicitly map
 * another.
 */
@Test
public void testExplicitMapping() throws Exception {
    // We will convert s1 from string to long (or leave it null), ignore s2,
    // convert l1 from long to string, and leave l2 the same.
    Schema input = NESTED_RECORD_SCHEMA;
    Schema parent = NESTED_PARENT_SCHEMA;
    Schema output = UNNESTED_OUTPUT_SCHEMA;
    Map<String, String> mapping = ImmutableMap.of("parent.id", "parentId");

    AvroRecordConverter converter = new AvroRecordConverter(input, output,
            mapping);

    Record inputRecord = new Record(input);
    inputRecord.put("l1", 5L);
    inputRecord.put("s1", "1000");
    Record parentRecord = new Record(parent);
    parentRecord.put("id", 200L);
    parentRecord.put("name", "parent");
    inputRecord.put("parent", parentRecord);
    Record outputRecord = converter.convert(inputRecord);
    assertEquals(5L, outputRecord.get("l1"));
    assertEquals(1000L, outputRecord.get("s1"));
    assertEquals(200L, outputRecord.get("parentId"));
}
 
Example #15
Source File: MarketoSOAPClientTest.java    From components with Apache License 2.0
@Test
public void testSyncLead() throws Exception {
    doReturn(getSyncLeadResult()).when(port).syncLead(any(ParamsSyncLead.class), any(AuthenticationHeader.class),
            any(MktowsContextHeader.class));
    oprops.afterOutputOperation();
    oprops.beforeMappingInput();
    mktoSR = client.syncLead(oprops, null);
    assertNotNull(mktoSR);
    assertFalse(mktoSR.isSuccess());
    IndexedRecord record = new Record(MarketoConstants.getSOAPOutputSchemaForSyncLead());
    record.put(0, 12345);
    record.put(1, "[email protected]");
    mktoSR = client.syncLead(oprops, record);
    assertNotNull(mktoSR);
    assertTrue(mktoSR.isSuccess());
    //
    doThrow(new RuntimeException("error")).when(port).syncLead(any(ParamsSyncLead.class), any(AuthenticationHeader.class),
            any(MktowsContextHeader.class));
    mktoSR = client.syncLead(oprops, record);
    assertNotNull(mktoSR);
    assertFalse(mktoSR.isSuccess());
}
 
Example #16
Source File: TestConvertAvroSchema.java    From localization_nifi with Apache License 2.0
private Record convertBasic(Record inputRecord, Locale locale) {
    Record result = new Record(OUTPUT_SCHEMA);
    result.put("id", Long.parseLong(inputRecord.get("id").toString()));
    result.put("color", inputRecord.get("primaryColor").toString());
    if (inputRecord.get("price") == null) {
        result.put("price", null);
    } else {
        final NumberFormat format = NumberFormat.getInstance(locale);
        double price;
        try {
            price = format.parse(inputRecord.get("price").toString()).doubleValue();
        } catch (ParseException e) {
            // Shouldn't happen
            throw new RuntimeException(e);
        }
        result.put("price", price);
    }
    return result;
}
 
Example #17
Source File: TestAvroRecordConverter.java    From localization_nifi with Apache License 2.0
/**
 * Tests the case where we try to convert a string to a long incorrectly.
 */
@Test(expected = org.apache.nifi.processors.kite.AvroRecordConverter.AvroConversionException.class)
public void testIllegalConversion() throws Exception {
    // We will convert s1 from string to long (or leave it null), ignore s2,
    // convert l1 from long to string, and leave l2 the same.
    Schema input = SchemaBuilder.record("Input")
            .namespace("com.cloudera.edh").fields()
            .nullableString("s1", "").requiredString("s2")
            .optionalLong("l1").requiredLong("l2").endRecord();
    Schema output = SchemaBuilder.record("Output")
            .namespace("com.cloudera.edh").fields().optionalLong("s1")
            .optionalString("l1").requiredLong("l2").endRecord();

    AvroRecordConverter converter = new AvroRecordConverter(input, output,
            EMPTY_MAPPING);

    Record inputRecord = new Record(input);
    inputRecord.put("s1", "blah");
    inputRecord.put("s2", "blah");
    inputRecord.put("l1", null);
    inputRecord.put("l2", 5L);
    converter.convert(inputRecord);
}
 
Example #18
Source File: MarketoInputWriterTest.java    From components with Apache License 2.0
@Before
public void setUp() throws Exception {
    super.setUp();

    props = new TMarketoInputProperties("test");
    props.connection.setupProperties();
    props.setupProperties();
    props.leadKeyValues.setValue("email");
    props.connection.maxReconnAttemps.setValue(2);
    props.connection.attemptsIntervalTime.setValue(500);
    props.updateSchemaRelated();
    when(sink.getProperties()).thenReturn(props);
    wop = new MarketoWriteOperation(sink);
    writer = new MarketoInputWriter(wop, null);
    writer.properties = props;
    assertTrue(writer instanceof MarketoInputWriter);

    record = new Record(MarketoConstants.getRESTSchemaForGetLeadOrGetMultipleLeads());
    record.put(1, "[email protected]");
}
 
Example #19
Source File: KafkaAvroMessageDecoder.java    From HiveKa with Apache License 2.0
public AvroGenericRecordWritable decode(byte[] payload) {
  try {
    MessageDecoderHelper helper = new MessageDecoderHelper(registry,
        topicName, payload).invoke();
    DatumReader<Record> reader = new GenericDatumReader<Record>(helper.getTargetSchema());

    log.debug("Trying to read kafka payload");
    log.debug("buffer: " + helper.getBuffer());
    log.debug("start: " + helper.getStart());
    log.debug("length: " + helper.getLength());
    log.debug("target schema: " + helper.getTargetSchema());
    log.debug("schema: " + helper.getSchema());
    GenericRecord record = reader.read(null, decoderFactory.binaryDecoder(helper.getBuffer().array(),
        helper.getStart(), helper.getLength(), null));
    log.debug("Read kafka payload as " + record);

    AvroGenericRecordWritable grw = new AvroGenericRecordWritable(record);
    grw.setFileSchema(latestSchema);

    return grw;
  } catch (IOException e) {
    throw new MessageDecoderException(e);
  }
}
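
Example #19 relies on project-specific pieces (MessageDecoderHelper, the schema registry, latestSchema) that are not shown. For reference, here is a minimal self-contained sketch of the underlying round trip: GenericDatumWriter serialises a GenericData.Record to raw Avro binary, and GenericDatumReader decodes it via DecoderFactory, the same calls the decoder above builds on. The schema and field names are illustrative.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

public class BinaryRoundTripSketch {
    public static void main(String[] args) throws IOException {
        Schema schema = SchemaBuilder.record("Msg").fields()
                .requiredString("body")
                .endRecord();

        GenericData.Record record = new GenericData.Record(schema);
        record.put("body", "hello");

        // Serialise to raw Avro binary (no container file, no embedded schema)
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericData.Record>(schema).write(record, encoder);
        encoder.flush();
        byte[] payload = out.toByteArray();

        // Decode: the reader needs the writer's schema to interpret the bytes
        GenericDatumReader<GenericData.Record> reader = new GenericDatumReader<>(schema);
        GenericData.Record decoded = reader.read(null,
                DecoderFactory.get().binaryDecoder(payload, 0, payload.length, null));
        System.out.println(decoded); // {"body": "hello"}
    }
}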
 
Example #20
Source File: TestFileSystemDataset.java    From kite with Apache License 2.0
@Test
public void testWriteAndRead() throws IOException {
  FileSystemDataset<Record> ds = new FileSystemDataset.Builder<Record>()
      .namespace("ns")
      .name("test")
      .configuration(getConfiguration())
      .descriptor(new DatasetDescriptor.Builder()
          .schemaUri(USER_SCHEMA_URL)
          .format(format)
          .compressionType(compressionType)
          .location(testDirectory)
          .build())
      .type(Record.class)
      .build();

  Assert.assertFalse("Dataset is not partitioned", ds.getDescriptor()
    .isPartitioned());

  writeTestUsers(ds, 10);
  checkTestUsers(ds, 10);
}
 
Example #21
Source File: DerivedColumnNoTransformationTest.java    From incubator-pinot with Apache License 2.0
private List<GenericRecord> generateTestData() throws Exception {
  Schema schema = new Schema.Parser().parse(ClassLoader.getSystemResourceAsStream(AVRO_SCHEMA));
  List<GenericRecord> inputRecords = new ArrayList<GenericRecord>();

  GenericRecord input = new GenericData.Record(schema);
  input.put("d1", "abc1");
  input.put("d2", 501L);
  input.put("d3", "xyz1");
  input.put("hoursSinceEpoch", generateRandomHoursSinceEpoch());
  input.put("m1", 10);
  input.put("m2", 20);
  inputRecords.add(input);

  input = new GenericData.Record(schema);
  input.put("d1", "abc2");
  input.put("d2", 502L);
  input.put("d3", "xyz2");
  input.put("hoursSinceEpoch", generateRandomHoursSinceEpoch());
  input.put("m1", 10);
  input.put("m2", 20);
  inputRecords.add(input);

  return inputRecords;
}
 
Example #22
Source File: TestLocalDatasetURIs.java    From kite with Apache License 2.0
@Test
public void testRelative() {
  DatasetRepository repo = DatasetRepositories.repositoryFor("repo:file:target/data");
  repo.delete("ns", "test");
  repo.create("ns", "test", descriptor);

  Dataset<Record> ds = Datasets.<Record, Dataset<Record>>
      load("dataset:file:target/data/ns/test", Record.class);

  Assert.assertNotNull("Should load dataset", ds);
  Assert.assertTrue(ds instanceof FileSystemDataset);
  Path cwd = localFS.makeQualified(new Path("."));
  Assert.assertEquals("Locations should match",
      new Path(cwd, "target/data/ns/test").toUri(), ds.getDescriptor().getLocation());
  Assert.assertEquals("Descriptors should match",
      repo.load("ns", "test").getDescriptor(), ds.getDescriptor());
  Assert.assertEquals("Should report correct namespace",
      "ns", ds.getNamespace());
  Assert.assertEquals("Should report correct name",
      "test", ds.getName());

  repo.delete("ns", "test");
}
 
Example #23
Source File: TestReadProjection.java    From iceberg with Apache License 2.0
@Test
public void testRename() throws Exception {
  Schema writeSchema = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get()),
      Types.NestedField.optional(1, "data", Types.StringType.get())
  );

  Record record = new Record(AvroSchemaUtil.convert(writeSchema, "table"));
  record.put("id", 34L);
  record.put("data", "test");

  Schema readSchema = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get()),
      Types.NestedField.optional(1, "renamed", Types.StringType.get())
  );

  Record projected = writeAndRead("project_and_rename", writeSchema, readSchema, record);

  Assert.assertEquals("Should contain the correct id value", 34L, (long) projected.get("id"));
  int cmp = Comparators.charSequences()
      .compare("test", (CharSequence) projected.get("renamed"));
  Assert.assertTrue("Should contain the correct data/renamed value", cmp == 0);
}
 
Example #24
Source File: MarketoOutputWriterTest.java    From components with Apache License 2.0
@Test
public void testWriteCustomObject() throws Exception {
    props.outputOperation.setValue(OutputOperation.deleteCustomObjects);
    props.updateSchemaRelated();
    when(sink.getProperties()).thenReturn(props);
    doReturn(getSuccessSyncResult("deleted")).when(client).deleteCustomObjects(any(TMarketoOutputProperties.class),
            any(List.class));
    IndexedRecord record = new Record(MarketoConstants.getRESTOutputSchemaForSyncLead());
    record.put(0, 12345);
    writer.open("test");
    writer.write(record);
    assertNotNull(writer.close());
    //
    props.outputOperation.setValue(OutputOperation.syncCustomObjects);
    when(sink.getProperties()).thenReturn(props);
    doReturn(getSuccessSyncResult("updated")).when(client).syncCustomObjects(any(TMarketoOutputProperties.class),
            any(List.class));
    writer.open("test");
    writer.write(record);
    assertNotNull(writer.close());
}
 
Example #25
Source File: TestReadProjection.java    From iceberg with Apache License 2.0 6 votes vote down vote up
@Test
public void testEmptyProjection() throws Exception {
  Schema schema = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get()),
      Types.NestedField.optional(1, "data", Types.StringType.get())
  );

  Record record = new Record(AvroSchemaUtil.convert(schema, "table"));
  record.put("id", 34L);
  record.put("data", "test");

  Record projected = writeAndRead("empty_projection", schema, schema.select(), record);

  Assert.assertNotNull("Should read a non-null record", projected);
  try {
    projected.get(0);
    Assert.fail("Should not retrieve value with ordinal 0");
  } catch (ArrayIndexOutOfBoundsException e) {
    // this is expected because there are no values
  }
}
 
Example #26
Source File: BigQueryConvertersTest.java    From DataflowTemplates with Apache License 2.0
/**
 * Tests that {@link BigQueryConverters.AvroToEntity} creates an Entity without a valid key when a
 * field is of type Record.
 */
@Test
public void testAvroToEntityRecordField() throws Exception {
  // Create test data
  TableFieldSchema column = generateNestedTableFieldSchema();
  List<TableFieldSchema> fields = new ArrayList<>();
  fields.add(column);
  TableSchema bqSchema = new TableSchema().setFields(fields);
  Record record = generateNestedAvroRecord();
  SchemaAndRecord inputBqData = new SchemaAndRecord(record, bqSchema);
  // Run the test
  Entity outputEntity = converter.apply(inputBqData);
  // Assess results
  String expectedCauseMessage = String.format("Column [address] of type [RECORD] not supported.");
  assertTrue(!outputEntity.hasKey());
  assertEquals(
      expectedCauseMessage, outputEntity.getPropertiesMap().get("cause").getStringValue());
  assertEquals(record.toString(), outputEntity.getPropertiesMap().get("row").getStringValue());
}
 
Example #27
Source File: AvroTestHelpers.java    From iceberg with Apache License 2.0
static void assertEquals(Types.StructType struct, Record expected, Record actual) {
  List<Types.NestedField> fields = struct.fields();
  for (int i = 0; i < fields.size(); i += 1) {
    Type fieldType = fields.get(i).type();

    Object expectedValue = expected.get(i);
    Object actualValue = actual.get(i);

    assertEquals(fieldType, expectedValue, actualValue);
  }
}
 
Example #28
Source File: TestGenericAvro.java    From iceberg with Apache License 2.0
@Override
protected void writeAndValidate(Schema schema) throws IOException {
  List<Record> expected = RandomAvroData.generate(schema, 100, 0L);

  File testFile = temp.newFile();
  Assert.assertTrue("Delete should succeed", testFile.delete());

  try (FileAppender<Record> writer = Avro.write(Files.localOutput(testFile))
      .schema(schema)
      .named("test")
      .build()) {
    for (Record rec : expected) {
      writer.add(rec);
    }
  }

  List<Record> rows;
  try (AvroIterable<Record> reader = Avro.read(Files.localInput(testFile))
      .project(schema)
      .build()) {
    rows = Lists.newArrayList(reader);
  }

  for (int i = 0; i < expected.size(); i += 1) {
    AvroTestHelpers.assertEquals(schema.asStruct(), expected.get(i), rows.get(i));
  }
}
 
Example #29
Source File: TestFileSystemDataset.java    From kite with Apache License 2.0
@Test(expected = ValidationException.class)
public void testCannotMergeDatasetsWithDifferentPartitionStrategies() throws IOException {
  FileSystemDataset<Record> ds = new FileSystemDataset.Builder<Record>()
      .namespace("ns")
      .name("users")
      .configuration(getConfiguration())
      .descriptor(new DatasetDescriptor.Builder()
          .schema(USER_SCHEMA)
          .location(testDirectory)
          .partitionStrategy(new PartitionStrategy.Builder()
              .hash("username", 2).build())
          .build())
      .type(Record.class)
      .build();
  FileSystemDataset<Record> dsUpdate = new FileSystemDataset.Builder<Record>()
      .namespace("ns")
      .name("users")
      .configuration(getConfiguration())
      .descriptor(new DatasetDescriptor.Builder()
          .schema(USER_SCHEMA)
          .location(testDirectory)
          .partitionStrategy(new PartitionStrategy.Builder()
              .hash("username", 2).hash("email", 3).build())
          .build())
      .type(Record.class)
      .build();
  ds.merge(dsUpdate);
}
 
Example #30
Source File: TestReadProjection.java    From iceberg with Apache License 2.0
@Test
public void testListProjection() throws IOException {
  Schema writeSchema = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get()),
      Types.NestedField.optional(10, "values",
          Types.ListType.ofOptional(11, Types.LongType.get()))
  );

  List<Long> values = ImmutableList.of(56L, 57L, 58L);

  Record record = new Record(AvroSchemaUtil.convert(writeSchema, "table"));
  record.put("id", 34L);
  record.put("values", values);

  Schema idOnly = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get())
  );

  Record projected = writeAndRead("id_only", writeSchema, idOnly, record);
  Assert.assertEquals("Should contain the correct id value", 34L, (long) projected.get("id"));
  Assert.assertNull("Should not project values list", projected.get("values"));

  Schema elementOnly = writeSchema.select("values.element");
  projected = writeAndRead("element_only", writeSchema, elementOnly, record);
  Assert.assertNull("Should not project id", projected.get("id"));
  Assert.assertEquals("Should project entire list", values, projected.get("values"));

  Schema listOnly = writeSchema.select("values");
  projected = writeAndRead("list_only", writeSchema, listOnly, record);
  Assert.assertNull("Should not project id", projected.get("id"));
  Assert.assertEquals("Should project entire list", values, projected.get("values"));
}