Java Code Examples for org.apache.avro.Schema#Parser
The following examples show how to use org.apache.avro.Schema#Parser. Each example notes the source file, project, and license it was taken from.
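All of the examples below follow the same pattern: create a new Schema.Parser, then call one of its parse overloads with the schema source (a JSON String, a File, or an InputStream; the latter two throw IOException). A single parser instance remembers the named types it has already parsed, so a later schema can refer to an earlier record, enum, or fixed type by name. The minimal sketch below illustrates both points; the User and Wrapper schemas are hypothetical and appear only for illustration.

import org.apache.avro.Schema;

public class SchemaParserSketch {

    public static void main(String[] args) {
        Schema.Parser parser = new Schema.Parser();

        // Parse a record schema from an inline JSON string (hypothetical schema).
        Schema user = parser.parse(
                "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
                + "{\"name\":\"name\",\"type\":\"string\"},"
                + "{\"name\":\"age\",\"type\":\"int\"}]}");

        // The same parser instance remembers the named type "User",
        // so a second schema may reference it by name.
        Schema wrapper = parser.parse(
                "{\"type\":\"record\",\"name\":\"Wrapper\",\"fields\":["
                + "{\"name\":\"user\",\"type\":\"User\"}]}");

        System.out.println(user.toString(true));                              // pretty-printed schema JSON
        System.out.println(wrapper.getField("user").schema().getFullName());  // prints "User"
    }
}

On invalid input, parse(String) throws an unchecked SchemaParseException, which is why Example 13 below can treat any exception from the parser as a syntax violation.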
Example 1
Source File: AvroCompositeSchemasTest.java From registry with Apache License 2.0
private void doTestSchemaResolution(String givenSchemaLocation, String expectedSchemaLocation) throws IOException {
    AvroSchemaResolver avroSchemaResolver = new AvroSchemaResolver(null);
    Schema schema = new Schema.Parser().parse(getResourceText(givenSchemaLocation));
    LOG.info("schema = %s", schema);

    Schema effectiveSchema = avroSchemaResolver.handleUnionFieldsWithNull(schema, new HashSet<>());
    LOG.info("effectiveSchema = %s", effectiveSchema);
    String returnedSchemaText = effectiveSchema.toString();
    Assert.assertEquals(getResourceText(expectedSchemaLocation).replace(" ", ""),
                        returnedSchemaText.replace(" ", ""));

    // double check whether the effective schema is semantically right parsing
    Schema.Parser parser = new Schema.Parser();
    Schema parsedReturnedSchema = parser.parse(returnedSchemaText);
    Assert.assertEquals(effectiveSchema, parsedReturnedSchema);
}
Example 2
Source File: AvroSchemaTest.java From pulsar with Apache License 2.0
@Test
public void testNotAllowNullSchema() throws JSONException {
    AvroSchema<Foo> avroSchema = AvroSchema.of(SchemaDefinition.<Foo>builder()
            .withPojo(Foo.class).withAlwaysAllowNull(false).build());
    assertEquals(avroSchema.getSchemaInfo().getType(), SchemaType.AVRO);
    Schema.Parser parser = new Schema.Parser();
    String schemaJson = new String(avroSchema.getSchemaInfo().getSchema());
    assertJSONEquals(schemaJson, SCHEMA_AVRO_NOT_ALLOW_NULL);
    Schema schema = parser.parse(schemaJson);

    for (String fieldName : FOO_FIELDS) {
        Schema.Field field = schema.getField(fieldName);
        Assert.assertNotNull(field);

        if (field.name().equals("field4")) {
            Assert.assertNotNull(field.schema().getTypes().get(1).getField("field1"));
        }
        if (field.name().equals("fieldUnableNull")) {
            Assert.assertNotNull(field.schema().getType());
        }
    }
}
Example 3
Source File: FastDeserializerDefaultsTest.java From avro-fastserde with Apache License 2.0
@Test
public void shouldReadSpecificLikeSlow() throws IOException {
    // given
    Schema.Parser parser = new Schema.Parser();
    Schema oldRecordSchema = parser.parse(this.getClass().getResourceAsStream("/schema/defaultsTestOld.avsc"));

    GenericData.Record oldRecord = new GenericData.Record(oldRecordSchema);
    GenericData.Record oldSubRecord = new GenericData.Record(oldRecordSchema.getField("oldSubRecord").schema());
    oldSubRecord.put("oldSubField", "testValueOfSubField");
    oldSubRecord.put("fieldToBeRemoved", 33);
    oldRecord.put("oldSubRecord", oldSubRecord);

    // when
    DefaultsTestRecord testRecordSlow = deserializeSpecificSlow(DefaultsTestRecord.getClassSchema(),
            oldRecordSchema, serializeGeneric(oldRecord));
    DefaultsTestRecord testRecordFast = deserializeSpecificFast(DefaultsTestRecord.getClassSchema(),
            oldRecordSchema, serializeGeneric(oldRecord));

    // then
    Assert.assertEquals(testRecordSlow, testRecordFast);
}
Example 4
Source File: AvroEventSerializer.java From Transwarp-Sample-Code with MIT License
private Schema loadFromUrl(String schemaUrl) throws IOException {
    Configuration conf = new Configuration();
    Schema.Parser parser = new Schema.Parser();
    if (schemaUrl.toLowerCase(Locale.ENGLISH).startsWith("hdfs://")) {
        FileSystem fs = FileSystem.get(conf);
        FSDataInputStream input = null;
        try {
            input = fs.open(new Path(schemaUrl));
            return parser.parse(input);
        } finally {
            if (input != null) {
                input.close();
            }
        }
    } else {
        InputStream is = null;
        try {
            is = new URL(schemaUrl).openStream();
            return parser.parse(is);
        } finally {
            if (is != null) {
                is.close();
            }
        }
    }
}
Example 5
Source File: DatasetWritePayload.java From components with Apache License 2.0
private static Schema readAvroSchema(JsonParser parser) throws IOException {
    JsonToken avroSchemaFieldToken = parser.nextToken();
    isTrue(FIELD_NAME == avroSchemaFieldToken, invalidInputMessage(FIELD_NAME, avroSchemaFieldToken));
    isTrue(Objects.equals(AVRO_SCHEMA_FIELD_NAME, parser.getText()),
            invalidInputMessage(AVRO_SCHEMA_FIELD_NAME, parser.getText()));

    JsonToken configFieldObjectStartToken = parser.nextToken();
    isTrue(START_OBJECT == configFieldObjectStartToken, invalidInputMessage(START_OBJECT, configFieldObjectStartToken));

    // This code is so awful I will certainly have cancer
    ObjectNode schemaAsJson = parser.readValueAsTree();
    Schema.Parser avroSchemaParser = new Schema.Parser();
    return avroSchemaParser.parse(new ObjectMapper().writeValueAsString(schemaAsJson));
}
Example 6
Source File: AvroSchemaFieldsGeneratorTest.java From registry with Apache License 2.0
@Test
public void testComplexSchemaRead() throws Exception {
    AvroFieldsGenerator avroFieldsGenerator = new AvroFieldsGenerator();
    try (InputStream schemaStream = this.getClass().getResourceAsStream("/schema-1.avsc")) {
        Schema.Parser parser = new Schema.Parser();
        Schema schema = parser.parse(schemaStream);

        List<SchemaFieldInfo> schemaFieldInfos = avroFieldsGenerator.generateFields(schema);
        Assert.assertEquals(schemaFieldInfos.size(), 12);
    }
}
Example 7
Source File: FileWriter.java From SPADE with GNU General Public License v3.0
public FileWriter(String schemaFile, String outputFile) throws IOException {
    Parser parser = new Schema.Parser();
    Schema schema = parser.parse(new File(schemaFile));
    DatumWriter<Object> datumWriter = new SpecificDatumWriter<Object>(schema);
    fileWriter = new DataFileWriter<>(datumWriter);
    fileWriter.create(schema, new File(outputFile));
}
Example 8
Source File: AvroCoderCloudObjectTranslator.java From beam with Apache License 2.0
@Override
public AvroCoder<?> fromCloudObject(CloudObject cloudObject) {
    Schema.Parser parser = new Schema.Parser();
    String className = Structs.getString(cloudObject, TYPE_FIELD);
    String schemaString = Structs.getString(cloudObject, SCHEMA_FIELD);
    try {
        Class<?> type = Class.forName(className);
        Schema schema = parser.parse(schemaString);
        return AvroCoder.of(type, schema);
    } catch (ClassNotFoundException e) {
        throw new IllegalArgumentException(e);
    }
}
Example 9
Source File: AggregateCombineFn.java From components with Apache License 2.0
public IndexedRecord extractOutput() {
    Schema.Parser parser = new Schema.Parser();
    Schema outputFieldSchema = parser.parse(outputFieldSchemaStr);
    GenericData.Record outputFieldRecord = new GenericData.Record(outputFieldSchema);
    AggregateUtils.setField(outputFieldPath, this.accumulatorFn.extractOutput(), outputFieldRecord);
    return outputFieldRecord;
}
Example 10
Source File: GeneratorDemo.java From structured-streaming-avro-demo with BSD 3-Clause "New" or "Revised" License
/**
 * @param args
 * @throws InterruptedException
 */
public static void main(String[] args) throws InterruptedException {
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092");
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");

    Schema.Parser parser = new Schema.Parser();
    Schema schema = parser.parse(USER_SCHEMA);
    System.out.println(schema.toString(true));

    Injection<GenericRecord, byte[]> recordInjection = GenericAvroCodecs.toBinary(schema);
    KafkaProducer<String, byte[]> producer = new KafkaProducer<>(props);
    SplittableRandom random = new SplittableRandom();

    int count = 0;
    while (true) {
        GenericData.Record avroRecord = new GenericData.Record(schema);
        avroRecord.put("str1", "Str 1-" + random.nextInt(10));
        avroRecord.put("str2", "Str 2-" + random.nextInt(1000));
        avroRecord.put("int1", random.nextInt(10000));

        GenericData.Record packetInfo = new GenericData.Record(schema.getField("packet_info").schema());
        packetInfo.put("demo", "value");
        avroRecord.put("packet_info", packetInfo);

        byte[] bytes = recordInjection.apply(avroRecord);
        ProducerRecord<String, byte[]> record = new ProducerRecord<>("mytopic", bytes);
        producer.send(record);

        count++;
        if (count % 300000 == 0) {
            Thread.sleep(500);
        }
    }
}
Example 11
Source File: ActivityJsonToAvroInterceptor.java From big-data-lite with MIT License
/**
 * Loads a schema from a URL.
 *
 * @param schemaUrl
 * @return Schema
 * @throws IOException
 */
private Schema loadFromUrl(String schemaUrl) throws IOException {
    Configuration conf = new Configuration();
    Schema.Parser parser = new Schema.Parser();
    if (schemaUrl.toLowerCase().startsWith("hdfs://")) {
        FileSystem fs = FileSystem.get(conf);
        FSDataInputStream input = null;
        try {
            input = fs.open(new Path(schemaUrl));
            return parser.parse(input);
        } finally {
            if (input != null) {
                input.close();
            }
        }
    } else {
        InputStream is = null;
        try {
            is = new URL(schemaUrl).openStream();
            return parser.parse(is);
        } finally {
            if (is != null) {
                is.close();
            }
        }
    }
}
Example 12
Source File: AvroGenericRecordBolt.java From jstorm with Apache License 2.0
@Override
protected void doPrepare(Map conf, TopologyContext topologyContext, OutputCollector collector) throws IOException {
    LOG.info("Preparing AvroGenericRecord Bolt...");
    this.fs = FileSystem.get(URI.create(this.fsUrl), hdfsConfig);
    Schema.Parser parser = new Schema.Parser();
    this.schema = parser.parse(this.schemaAsString);
}
Example 13
Source File: AvroContentValidator.java From apicurio-registry with Apache License 2.0
/**
 * @see io.apicurio.registry.rules.validity.ContentValidator#validate(io.apicurio.registry.rules.validity.ValidityLevel, ContentHandle)
 */
@Override
public void validate(ValidityLevel level, ContentHandle artifactContent) throws InvalidContentException {
    if (level == ValidityLevel.SYNTAX_ONLY || level == ValidityLevel.FULL) {
        try {
            Schema.Parser parser = new Schema.Parser();
            parser.parse(artifactContent.content());
        } catch (Exception e) {
            throw new InvalidContentException("Syntax violation for Avro artifact.", e);
        }
    }
}
Example 14
Source File: AvroSchemaUtils.java From apicurio-registry with Apache License 2.0
private static Schema createPrimitiveSchema(Schema.Parser parser, String type) {
    String schemaString = String.format("{\"type\" : \"%s\"}", type);
    return parser.parse(schemaString);
}
Example 15
Source File: Enums.java From snowflake-kafka-connector with Apache License 2.0
public Schema getSchema() {
    Schema.Parser parser = new Schema.Parser();
    return parser.parse(schema);
}
Example 16
Source File: FastDeserializerDefaultsTest.java From avro-fastserde with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void shouldReadSpecificDefaults() throws IOException {
    // given
    Schema.Parser parser = new Schema.Parser();
    Schema oldRecordSchema = parser.parse(this.getClass().getResourceAsStream("/schema/defaultsTestOld.avsc"));
    GenericData.Record oldRecord = new GenericData.Record(oldRecordSchema);
    GenericData.Record oldSubRecord = new GenericData.Record(oldRecordSchema.getField("oldSubRecord").schema());
    oldSubRecord.put("oldSubField", "testValueOfSubField");
    oldSubRecord.put("fieldToBeRemoved", 33);
    oldRecord.put("oldSubRecord", oldSubRecord);

    // when
    DefaultsTestRecord testRecord = deserializeSpecificFast(DefaultsTestRecord.getClassSchema(),
            oldRecordSchema, serializeGeneric(oldRecord));

    // then
    Assert.assertEquals(oldSubRecord.get("oldSubField"), ((OldSubRecord) testRecord.get("oldSubRecord")).get("oldSubField"));
    Assert.assertEquals("defaultOldSubField", ((OldSubRecord) testRecord.get("newFieldWithOldSubRecord")).get("oldSubField"));
    Assert.assertEquals(42, (int) testRecord.getTestInt());
    Assert.assertNull(testRecord.getTestIntUnion());
    Assert.assertEquals(9223372036854775807L, (long) testRecord.getTestLong());
    Assert.assertNull(testRecord.getTestLongUnion());
    Assert.assertEquals(3.14d, testRecord.getTestDouble(), 0);
    Assert.assertNull(testRecord.getTestDoubleUnion());
    Assert.assertEquals(3.14f, testRecord.getTestFloat(), 0);
    Assert.assertNull(testRecord.getTestFloatUnion());
    Assert.assertEquals(true, testRecord.getTestBoolean());
    Assert.assertNull(testRecord.getTestBooleanUnion());
    Assert.assertEquals(ByteBuffer.wrap(new byte[]{0, 1, 2, 3}), testRecord.getTestBytes());
    Assert.assertNull(testRecord.getTestBytesUnion());
    Assert.assertEquals("testStringValue", testRecord.getTestString());
    Assert.assertEquals(new URL("http://www.example.com"), testRecord.getTestStringable());
    Assert.assertNull(testRecord.getTestStringUnion());
    Assert.assertEquals(new DefaultsFixed(new byte[]{(byte) 0xFF}), testRecord.getTestFixed());
    Assert.assertNull(testRecord.getTestFixedUnion());
    Assert.assertEquals(Collections.singletonList(new DefaultsFixed(new byte[]{(byte) 0xFA})), testRecord.getTestFixedArray());

    List listWithNull = new LinkedList();
    listWithNull.add(null);
    Assert.assertEquals(listWithNull, testRecord.getTestFixedUnionArray());
    Assert.assertEquals(DefaultsEnum.C, testRecord.getTestEnum());
    Assert.assertNull(testRecord.getTestEnumUnion());
    Assert.assertEquals(Collections.singletonList(Collections.singletonList(DefaultsNewEnum.B)), testRecord.getTestNewEnumIntUnionArray());
    Assert.assertEquals(Arrays.asList(DefaultsEnum.E, DefaultsEnum.B), testRecord.getTestEnumArray());
    Assert.assertEquals(listWithNull, testRecord.getTestEnumUnionArray());
    Assert.assertNull(testRecord.getSubRecordUnion());
    Assert.assertEquals(DefaultsSubRecord.newBuilder().setSubField("valueOfSubField")
            .setArrayField(Collections.singletonList(DefaultsEnum.A)).build(), testRecord.getSubRecord());
    Assert.assertEquals(Collections.singletonList(DefaultsSubRecord.newBuilder().setSubField("recordArrayValue")
            .setArrayField(Collections.singletonList(DefaultsEnum.A)).build()), testRecord.getRecordArray());
    Assert.assertEquals(listWithNull, testRecord.getRecordUnionArray());

    Map stringableMap = new HashMap();
    stringableMap.put(new URL("http://www.example2.com"), new BigInteger("123"));
    Assert.assertEquals(stringableMap, testRecord.getStringableMap());

    Map recordMap = new HashMap();
    recordMap.put("test", DefaultsSubRecord.newBuilder().setSubField("recordMapValue")
            .setArrayField(Collections.singletonList(DefaultsEnum.A)).build());
    Assert.assertEquals(recordMap, testRecord.getRecordMap());

    Map recordUnionMap = new HashMap();
    recordUnionMap.put("test", null);
    Assert.assertEquals(recordUnionMap, testRecord.getRecordUnionMap());
    Assert.assertEquals(Collections.singletonList(recordUnionMap), testRecord.getRecordUnionMapArray());

    Map recordUnionArrayMap = new HashMap();
    recordUnionArrayMap.put("test", listWithNull);
    Assert.assertEquals(recordUnionArrayMap, testRecord.getRecordUnionArrayMap());
}
Example 17
Source File: AvroAsJsonOutputFormat.java From iow-hadoop-streaming with Apache License 2.0
@Override
public RecordWriter<Text, NullWritable> getRecordWriter(FileSystem ignore, JobConf job,
        String name, Progressable prog) throws IOException {

    Schema schema;
    Schema.Parser p = new Schema.Parser();
    String strSchema = job.get("iow.streaming.output.schema");
    if (strSchema == null) {
        String schemaFile = job.get("iow.streaming.output.schema.file", "streaming_output_schema");

        if (job.getBoolean("iow.streaming.schema.use.prefix", false)) {
            // guess schema from file name
            // format is: schema:filename
            // with special keyword default - 'default:filename'
            String str[] = name.split(":");
            if (!str[0].equals("default"))
                schemaFile = str[0];
            name = str[1];
        }

        LOG.info(this.getClass().getSimpleName() + ": Using schema from file: " + schemaFile);
        File f = new File(schemaFile);
        schema = p.parse(f);
    } else {
        LOG.info(this.getClass().getSimpleName() + ": Using schema from jobconf.");
        schema = p.parse(strSchema);
    }

    if (schema == null) {
        throw new IOException("Can't find proper output schema");
    }

    DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(new GenericDatumWriter<GenericRecord>());
    configureDataFileWriter(writer, job);

    Path path = FileOutputFormat.getTaskOutputPath(job, name + org.apache.avro.mapred.AvroOutputFormat.EXT);
    writer.create(schema, path.getFileSystem(job).create(path));

    return createRecordWriter(writer, schema);
}
Example 18
Source File: SimpleAvroProducer.java From landoop-avro-generator with Apache License 2.0
private static Schema getSchema(String schemaString) {
    Schema.Parser parser = new Schema.Parser();
    return parser.parse(schemaString.replace('`', '"'));
}
Example 19
Source File: AvroNestedCheckerTest.java From registry with Apache License 2.0
@BeforeClass
public static void beforeClass() throws IOException {
    Schema.Parser schemaParser = new Schema.Parser();
    simpleNestedSchema = schemaParser.parse(AvroNestedCheckerTest.class.getResourceAsStream("/avro/nested/nested-simple.avsc"));
    complexNestedSchema = schemaParser.parse(AvroNestedCheckerTest.class.getResourceAsStream("/avro/nested/nested-complex.avsc"));
}