org.codehaus.jackson.JsonEncoding Java Examples
The following examples show how to use
org.codehaus.jackson.JsonEncoding.
You can vote up the examples you like or vote down the ones you don't like,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: StatePool.java From hadoop with Apache License 2.0 | 6 votes |
private void write(DataOutput out) throws IOException { // This is just a JSON experiment System.out.println("Dumping the StatePool's in JSON format."); ObjectMapper outMapper = new ObjectMapper(); outMapper.configure( SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true); // define a module SimpleModule module = new SimpleModule("State Serializer", new Version(0, 1, 1, "FINAL")); // add the state serializer //module.addSerializer(State.class, new StateSerializer()); // register the module with the object-mapper outMapper.registerModule(module); JsonFactory outFactory = outMapper.getJsonFactory(); JsonGenerator jGen = outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8); jGen.useDefaultPrettyPrinter(); jGen.writeObject(this); jGen.close(); }
Example #2
Source File: RecordWithMetadataToEnvelopedRecordWithMetadata.java From incubator-gobblin with Apache License 2.0 | 6 votes |
/**
 * Wraps the input record and its metadata in a JSON envelope and returns it
 * as a single UTF-8 encoded byte-array record.
 *
 * @throws DataConversionException if JSON serialization fails
 */
@Override
public Iterable<RecordWithMetadata<byte[]>> convertRecord(String outputSchema,
    RecordWithMetadata<?> inputRecord, WorkUnitState workUnit) throws DataConversionException {
  try {
    updateRecordMetadata(inputRecord);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream(512);
    try (JsonGenerator generator =
        jsonFactory.createJsonGenerator(buffer, JsonEncoding.UTF8).setCodec(objectMapper)) {
      generator.writeStartObject();
      // envelope = headers followed by the record payload
      writeHeaders(inputRecord, generator);
      writeRecord(inputRecord, generator);
      generator.writeEndObject();
    }
    return Collections.singleton(
        new RecordWithMetadata<byte[]>(buffer.toByteArray(), inputRecord.getMetadata()));
  } catch (IOException e) {
    throw new DataConversionException(e);
  }
}
Example #3
Source File: DataObjectCacheGenerator.java From FoxBPM with Apache License 2.0 | 6 votes |
public void generate(ZipOutputStream out) { log.debug("开始处理bizData.data..."); try{ List<Map<String,Object>> list = FoxBpmUtil.getProcessEngine().getModelService().getAllBizObjects(); ObjectMapper objectMapper = new ObjectMapper(); JsonGenerator jsonGenerator = objectMapper.getJsonFactory().createJsonGenerator(out, JsonEncoding.UTF8); String tmpEntryName = "cache/bizData.data"; ZipEntry zipEntry = new ZipEntry(tmpEntryName); zipEntry.setMethod(ZipEntry.DEFLATED);// 设置条目的压缩方式 out.putNextEntry(zipEntry); jsonGenerator.writeObject(list); out.closeEntry(); log.debug("处理bizData.data文件完毕"); }catch(Exception ex){ log.error("解析bizData.data文件失败!生成zip文件失败!"); throw new FoxBPMException("解析bizData.data文件失败",ex); } }
Example #4
Source File: GroupDefinitionsCacheGenerator.java From FoxBPM with Apache License 2.0 | 6 votes |
public void generate(ZipOutputStream out) { log.debug("开始处理GroupDefinitions.data..."); try{ List<GroupDefinition> groupDefinitions = FoxBpmUtil.getProcessEngine().getIdentityService().getAllGroupDefinitions(); Map<String,Object> resultMap = new HashMap<String, Object>(); resultMap.put("data", groupDefinitions); ObjectMapper objectMapper = new ObjectMapper(); JsonGenerator jsonGenerator = objectMapper.getJsonFactory().createJsonGenerator(out, JsonEncoding.UTF8); String tmpEntryName = "cache/allGroupDefinitions.data"; ZipEntry zipEntry = new ZipEntry(tmpEntryName); zipEntry.setMethod(ZipEntry.DEFLATED);// 设置条目的压缩方式 out.putNextEntry(zipEntry); jsonGenerator.writeObject(resultMap); out.closeEntry(); log.debug("处理GroupDefinitions.data文件完毕"); }catch(Exception ex){ log.error("解析GroupDefinitions.data文件失败!生成zip文件失败!"); throw new FoxBPMException("解析GroupDefinitions.data文件失败",ex); } }
Example #5
Source File: TaskCommandDefinitionsGenerator.java From FoxBPM with Apache License 2.0 | 6 votes |
public void generate(ZipOutputStream out) { try { log.debug("开始处理taskCommandDefinition.data文件。。。"); ProcessEngineConfigurationImpl processEngineConfigurationImpl = FoxBpmUtil.getProcessEngine().getProcessEngineConfiguration(); List<TaskCommandDefinition> list = processEngineConfigurationImpl.getTaskCommandDefinitions(); ObjectMapper objectMapper = new ObjectMapper(); JsonGenerator jsonGenerator = objectMapper.getJsonFactory().createJsonGenerator(out, JsonEncoding.UTF8); String tmpEntryName = "cache/taskCommandDefinition.data"; ZipEntry zipEntry = new ZipEntry(tmpEntryName); zipEntry.setMethod(ZipEntry.DEFLATED);// 设置条目的压缩方式 out.putNextEntry(zipEntry); jsonGenerator.writeObject(list); out.closeEntry(); log.debug("处理taskCommandDefinition.data文件完毕"); } catch (Exception ex) { log.error("解析taskCommandDefinition.data文件失败!生成zip文件失败!"); throw new FoxBPMException("解析taskCommandDefinition.data文件失败", ex); } }
Example #6
Source File: Display.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Opens the Avro data file at {@code status} and wires up a JSON encoder so
 * records can be rendered one root value per line.
 *
 * @throws IOException if the file cannot be opened
 */
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> datumReader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader = DataFileReader.openReader(new AvroFSInput(fc, status.getPath()), datumReader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator = new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  // separate root-level JSON values with the platform line separator
  MinimalPrettyPrinter printer = new MinimalPrettyPrinter();
  printer.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(printer);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
Example #7
Source File: Anonymizer.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Creates a pretty-printing JSON generator writing to {@code path},
 * transparently compressing the output when the path maps to a known
 * compression codec.
 *
 * @throws IOException if the output stream cannot be created
 */
private JsonGenerator createJsonGenerator(Configuration conf, Path path) throws IOException {
  FileSystem fs = path.getFileSystem(conf);
  CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(path);
  Compressor compressor = null;
  OutputStream output;
  if (codec == null) {
    // no codec for this extension: write uncompressed
    output = fs.create(path);
  } else {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(fs.create(path), compressor);
  }
  JsonGenerator generator = outFactory.createJsonGenerator(output, JsonEncoding.UTF8);
  generator.useDefaultPrettyPrinter();
  return generator;
}
Example #8
Source File: StatePool.java From big-c with Apache License 2.0 | 6 votes |
private void write(DataOutput out) throws IOException { // This is just a JSON experiment System.out.println("Dumping the StatePool's in JSON format."); ObjectMapper outMapper = new ObjectMapper(); outMapper.configure( SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true); // define a module SimpleModule module = new SimpleModule("State Serializer", new Version(0, 1, 1, "FINAL")); // add the state serializer //module.addSerializer(State.class, new StateSerializer()); // register the module with the object-mapper outMapper.registerModule(module); JsonFactory outFactory = outMapper.getJsonFactory(); JsonGenerator jGen = outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8); jGen.useDefaultPrettyPrinter(); jGen.writeObject(this); jGen.close(); }
Example #9
Source File: JsonObjectMapperWriter.java From big-c with Apache License 2.0 | 6 votes |
public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException { ObjectMapper mapper = new ObjectMapper(); mapper.configure( SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true); // define a module SimpleModule module = new SimpleModule("Default Serializer", new Version(0, 1, 1, "FINAL")); // add various serializers to the module // add default (all-pass) serializer for all rumen specific data types module.addSerializer(DataType.class, new DefaultRumenSerializer()); // add a serializer to use object.toString() while serializing module.addSerializer(ID.class, new ObjectStringSerializer<ID>()); // register the module with the object-mapper mapper.registerModule(module); mapper.getJsonFactory(); writer = mapper.getJsonFactory().createJsonGenerator( output, JsonEncoding.UTF8); if (prettyPrint) { writer.useDefaultPrettyPrinter(); } }
Example #10
Source File: Display.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Opens the Avro data file at {@code status} and wires up a JSON encoder so
 * records can be rendered one root value per line.
 *
 * @throws IOException if the file cannot be opened
 */
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> datumReader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader = DataFileReader.openReader(new AvroFSInput(fc, status.getPath()), datumReader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator = new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  // separate root-level JSON values with the platform line separator
  MinimalPrettyPrinter printer = new MinimalPrettyPrinter();
  printer.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(printer);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
Example #11
Source File: Anonymizer.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Creates a pretty-printing JSON generator writing to {@code path},
 * transparently compressing the output when the path maps to a known
 * compression codec.
 *
 * @throws IOException if the output stream cannot be created
 */
private JsonGenerator createJsonGenerator(Configuration conf, Path path) throws IOException {
  FileSystem fs = path.getFileSystem(conf);
  CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(path);
  Compressor compressor = null;
  OutputStream output;
  if (codec == null) {
    // no codec for this extension: write uncompressed
    output = fs.create(path);
  } else {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(fs.create(path), compressor);
  }
  JsonGenerator generator = outFactory.createJsonGenerator(output, JsonEncoding.UTF8);
  generator.useDefaultPrettyPrinter();
  return generator;
}
Example #12
Source File: JsonObjectMapperWriter.java From hadoop with Apache License 2.0 | 6 votes |
public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException { ObjectMapper mapper = new ObjectMapper(); mapper.configure( SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true); // define a module SimpleModule module = new SimpleModule("Default Serializer", new Version(0, 1, 1, "FINAL")); // add various serializers to the module // add default (all-pass) serializer for all rumen specific data types module.addSerializer(DataType.class, new DefaultRumenSerializer()); // add a serializer to use object.toString() while serializing module.addSerializer(ID.class, new ObjectStringSerializer<ID>()); // register the module with the object-mapper mapper.registerModule(module); mapper.getJsonFactory(); writer = mapper.getJsonFactory().createJsonGenerator( output, JsonEncoding.UTF8); if (prettyPrint) { writer.useDefaultPrettyPrinter(); } }
Example #13
Source File: TestHistograms.java From big-c with Apache License 2.0 | 5 votes |
/**
 * For each input file argument named "input&lt;X&gt;", computes its discrete CDF
 * and writes it as pretty-printed JSON to a sibling "gold&lt;X&gt;" file.
 *
 * @throws IOException on any filesystem or serialization failure
 */
public static void main(String[] args) throws IOException {
  final Configuration conf = new Configuration();
  final FileSystem lfs = FileSystem.getLocal(conf);
  for (String arg : args) {
    Path filePath = new Path(arg).makeQualified(lfs);
    String fileName = filePath.getName();
    if (!fileName.startsWith("input")) {
      System.err.println("Input file not started with \"input\". File "+fileName+" skipped.");
      continue;
    }
    LoggedDiscreteCDF cdf = histogramFileToCDF(filePath, lfs);
    // gold file lives next to the input: input<X> -> gold<X>
    String testName = fileName.substring("input".length());
    Path goldFilePath = new Path(filePath.getParent(), "gold" + testName);
    ObjectMapper mapper = new ObjectMapper();
    JsonFactory factory = mapper.getJsonFactory();
    FSDataOutputStream ostream = lfs.create(goldFilePath, true);
    JsonGenerator gen = factory.createJsonGenerator(ostream, JsonEncoding.UTF8);
    gen.useDefaultPrettyPrinter();
    gen.writeObject(cdf);
    gen.close();
  }
}
Example #14
Source File: JsonUtils.java From fountain with Apache License 2.0 | 5 votes |
public byte[] writeValueAsBytes(Object value, JsonSerialize.Inclusion inc) throws IOException, JsonGenerationException, JsonMappingException { if (inc == null) { return super.writeValueAsBytes(value); } // alas, we have to pull the recycler directly here... ByteArrayBuilder bb = new ByteArrayBuilder(_jsonFactory._getBufferRecycler()); writeValueWithConf(_jsonFactory.createJsonGenerator(bb, JsonEncoding.UTF8), value,inc); byte[] result = bb.toByteArray(); bb.release(); return result; }
Example #15
Source File: TestHistograms.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * For each input file argument named "input&lt;X&gt;", computes its discrete CDF
 * and writes it as pretty-printed JSON to a sibling "gold&lt;X&gt;" file.
 *
 * @throws IOException on any filesystem or serialization failure
 */
public static void main(String[] args) throws IOException {
  final Configuration conf = new Configuration();
  final FileSystem lfs = FileSystem.getLocal(conf);
  for (String arg : args) {
    Path filePath = new Path(arg).makeQualified(lfs);
    String fileName = filePath.getName();
    if (!fileName.startsWith("input")) {
      System.err.println("Input file not started with \"input\". File "+fileName+" skipped.");
      continue;
    }
    LoggedDiscreteCDF cdf = histogramFileToCDF(filePath, lfs);
    // gold file lives next to the input: input<X> -> gold<X>
    String testName = fileName.substring("input".length());
    Path goldFilePath = new Path(filePath.getParent(), "gold" + testName);
    ObjectMapper mapper = new ObjectMapper();
    JsonFactory factory = mapper.getJsonFactory();
    FSDataOutputStream ostream = lfs.create(goldFilePath, true);
    JsonGenerator gen = factory.createJsonGenerator(ostream, JsonEncoding.UTF8);
    gen.useDefaultPrettyPrinter();
    gen.writeObject(cdf);
    gen.close();
  }
}
Example #16
Source File: GlobalMetadata.java From incubator-gobblin with Apache License 2.0 | 5 votes |
/**
 * Serialize as a UTF8 encoded JSON string.
 *
 * @return the serialized JSON bytes
 * @throws RuntimeException wrapping any IOException (unexpected for an in-memory buffer)
 */
public byte[] toJsonUtf8() {
  ByteArrayOutputStream buffer = new ByteArrayOutputStream(512);
  try {
    try (JsonGenerator generator =
        jsonFactory.createJsonGenerator(buffer, JsonEncoding.UTF8).setCodec(objectMapper)) {
      toJsonUtf8(generator);
    }
    return buffer.toByteArray();
  } catch (IOException e) {
    throw new RuntimeException("Unexpected IOException serializing to ByteArray", e);
  }
}
Example #17
Source File: GlobalMetadata.java From incubator-gobblin with Apache License 2.0 | 5 votes |
public String getId() { if (cachedId != null) { return cachedId; } if (datasetLevel.size() == 0 && fileLevel.size() == 0) { cachedId = EMPTY_ID; return cachedId; } try { // ID is calculated by serializing body to JSON and then taking that hash ByteArrayOutputStream bOs = new ByteArrayOutputStream(512); MessageDigest md5Digest = MessageDigest.getInstance("MD5"); try (JsonGenerator generator = jsonFactory.createJsonGenerator(bOs, JsonEncoding.UTF8).setCodec(objectMapper)) { generator.writeStartObject(); bodyToJsonUtf8(generator); generator.writeEndObject(); } byte[] digestBytes = md5Digest.digest(bOs.toByteArray()); cachedId = DatatypeConverter.printHexBinary(digestBytes); return cachedId; } catch (IOException|NoSuchAlgorithmException e) { throw new RuntimeException("Unexpected exception generating id", e); } }
Example #18
Source File: JsonStorage.java From spork with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") public void putNext(Tuple t) throws IOException { // Build a ByteArrayOutputStream to write the JSON into ByteArrayOutputStream baos = new ByteArrayOutputStream(BUF_SIZE); // Build the generator JsonGenerator json = jsonFactory.createJsonGenerator(baos, JsonEncoding.UTF8); // Write the beginning of the top level tuple object json.writeStartObject(); ResourceFieldSchema[] fields = schema.getFields(); for (int i = 0; i < fields.length; i++) { int tupleLength = t.size(); //write col if exists in tuple, null otherwise if (i < tupleLength) { writeField(json, fields[i], t.get(i)); } else { writeField(json, fields[i], null); } } json.writeEndObject(); json.close(); // Hand a null key and our string to Hadoop try { writer.write(null, new Text(baos.toByteArray())); } catch (InterruptedException ie) { throw new IOException(ie); } }
Example #19
Source File: TestHistograms.java From RDFS with Apache License 2.0 | 5 votes |
/**
 * For each input file argument named "input&lt;X&gt;", computes its discrete CDF
 * and writes it as pretty-printed JSON to a sibling "gold&lt;X&gt;" file.
 *
 * @throws IOException on any filesystem or serialization failure
 */
public static void main(String[] args) throws IOException {
  final Configuration conf = new Configuration();
  final FileSystem lfs = FileSystem.getLocal(conf);
  for (String arg : args) {
    Path filePath = new Path(arg).makeQualified(lfs);
    String fileName = filePath.getName();
    if (!fileName.startsWith("input")) {
      System.err.println("Input file not started with \"input\". File "+fileName+" skipped.");
      continue;
    }
    LoggedDiscreteCDF cdf = histogramFileToCDF(filePath, lfs);
    // gold file lives next to the input: input<X> -> gold<X>
    String testName = fileName.substring("input".length());
    Path goldFilePath = new Path(filePath.getParent(), "gold" + testName);
    ObjectMapper mapper = new ObjectMapper();
    JsonFactory factory = mapper.getJsonFactory();
    FSDataOutputStream ostream = lfs.create(goldFilePath, true);
    JsonGenerator gen = factory.createJsonGenerator(ostream, JsonEncoding.UTF8);
    gen.useDefaultPrettyPrinter();
    gen.writeObject(cdf);
    gen.close();
  }
}
Example #20
Source File: JacksonJsonGenerator.java From elasticsearch-hadoop with Apache License 2.0 | 5 votes |
/**
 * Wraps the given stream in a UTF-8 Jackson JSON generator.
 *
 * @param out target output stream
 * @throws EsHadoopSerializationException if the generator cannot be created
 */
public JacksonJsonGenerator(OutputStream out) {
  this.out = out;
  try {
    // use dedicated method to lower Jackson requirement
    this.generator = JSON_FACTORY.createJsonGenerator(out, JsonEncoding.UTF8);
  } catch (IOException ex) {
    throw new EsHadoopSerializationException(ex);
  }
}
Example #21
Source File: Json.java From projectforge-webapp with GNU General Public License v3.0 | 4 votes |
/**
 * Creates a file-backed generator with the default pretty printer enabled.
 */
@Override
public JsonGenerator createJsonGenerator(File f, JsonEncoding enc) throws IOException {
  JsonGenerator generator = super.createJsonGenerator(f, enc);
  return generator.useDefaultPrettyPrinter();
}
Example #22
Source File: Json.java From projectforge-webapp with GNU General Public License v3.0 | 4 votes |
/**
 * Creates a stream-backed generator with the default pretty printer enabled.
 */
@Override
public JsonGenerator createJsonGenerator(OutputStream out, JsonEncoding enc) throws IOException {
  JsonGenerator generator = super.createJsonGenerator(out, enc);
  return generator.useDefaultPrettyPrinter();
}