Java Code Examples for org.codehaus.jackson.JsonGenerator

The following examples show how to use org.codehaus.jackson.JsonGenerator. They are extracted from open source projects.
Example 1
Project: hadoop-oss   File: Display.java
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
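This constructor wires a Jackson generator into Avro's EncoderFactory so that each Avro record comes out as one JSON value per line; the MinimalPrettyPrinter with a line-separator root-value separator is what keeps the records newline-delimited. Below is a minimal, self-contained sketch of just that generator setup (the class name and the records written are illustrative, not part of the Hadoop source):

import java.io.ByteArrayOutputStream;
import org.codehaus.jackson.JsonEncoding;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.util.MinimalPrettyPrinter;

public class LineDelimitedJsonSketch {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonGenerator gen = new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
    // Separate root-level values with newlines instead of the default single space.
    MinimalPrettyPrinter pp = new MinimalPrettyPrinter();
    pp.setRootValueSeparator(System.getProperty("line.separator"));
    gen.setPrettyPrinter(pp);
    for (int i = 0; i < 3; i++) {
      gen.writeStartObject();             // each iteration emits one root-level object
      gen.writeNumberField("record", i);
      gen.writeEndObject();
    }
    gen.close();
    System.out.print(out.toString("UTF-8"));  // three JSON objects, one per line
  }
}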
 
Example 2
Project: lams   File: JacksonObjectMapperFactoryBean.java
private void configureFeature(Object feature, boolean enabled) {
	if (feature instanceof JsonParser.Feature) {
		this.objectMapper.configure((JsonParser.Feature) feature, enabled);
	}
	else if (feature instanceof JsonGenerator.Feature) {
		this.objectMapper.configure((JsonGenerator.Feature) feature, enabled);
	}
	else if (feature instanceof SerializationConfig.Feature) {
		this.objectMapper.configure((SerializationConfig.Feature) feature, enabled);
	}
	else if (feature instanceof DeserializationConfig.Feature) {
		this.objectMapper.configure((DeserializationConfig.Feature) feature, enabled);
	}
	else {
		throw new IllegalArgumentException("Unknown feature class: " + feature.getClass().getName());
	}
}
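The method above routes a generic feature object to the matching ObjectMapper.configure overload. As a hedged illustration (the feature choices below are arbitrary examples, not settings used by the lams project), the equivalent direct calls on a Jackson 1.x ObjectMapper look like this:

import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.map.DeserializationConfig;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;

public class FeatureConfigSketch {
  public static void main(String[] args) {
    ObjectMapper mapper = new ObjectMapper();
    // One configure() overload exists per Jackson 1.x feature family.
    mapper.configure(JsonParser.Feature.ALLOW_COMMENTS, true);
    mapper.configure(JsonGenerator.Feature.WRITE_NUMBERS_AS_STRINGS, false);
    mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
    mapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false);
  }
}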
 
Example 3
Project: lams   File: MappingJacksonHttpMessageConverter.java
@Override
protected void writeInternal(Object object, HttpOutputMessage outputMessage)
		throws IOException, HttpMessageNotWritableException {

	JsonEncoding encoding = getJsonEncoding(outputMessage.getHeaders().getContentType());
	JsonGenerator jsonGenerator =
			this.objectMapper.getJsonFactory().createJsonGenerator(outputMessage.getBody(), encoding);

	// A workaround for JsonGenerators not applying serialization features
	// https://github.com/FasterXML/jackson-databind/issues/12
	if (this.objectMapper.getSerializationConfig().isEnabled(SerializationConfig.Feature.INDENT_OUTPUT)) {
		jsonGenerator.useDefaultPrettyPrinter();
	}

	try {
		if (this.jsonPrefix != null) {
			jsonGenerator.writeRaw(this.jsonPrefix);
		}
		this.objectMapper.writeValue(jsonGenerator, object);
	}
	catch (JsonProcessingException ex) {
		throw new HttpMessageNotWritableException("Could not write JSON: " + ex.getMessage(), ex);
	}
}
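The INDENT_OUTPUT check is needed because a generator created directly from the factory does not inherit the mapper's serialization features (see the linked issue). A hedged stand-alone sketch of the same workaround outside of Spring (the value written is a placeholder):

import java.util.Collections;
import org.codehaus.jackson.JsonEncoding;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;

public class IndentWorkaroundSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
    JsonGenerator gen = mapper.getJsonFactory()
        .createJsonGenerator(System.out, JsonEncoding.UTF8);
    // Mirror the mapper's INDENT_OUTPUT setting onto the hand-built generator.
    if (mapper.getSerializationConfig().isEnabled(SerializationConfig.Feature.INDENT_OUTPUT)) {
      gen.useDefaultPrettyPrinter();
    }
    mapper.writeValue(gen, Collections.singletonMap("hello", "world"));
  }
}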
 
Example 4
Project: hadoop   File: StatePool.java
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
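Note that jGen.writeObject(this) only works because the generator was created from the ObjectMapper's own JsonFactory, which installs the mapper as the generator's ObjectCodec; a generator built from a plain new JsonFactory() would throw an IllegalStateException on writeObject(). A minimal sketch of that dependency (the map written here is just an illustration):

import org.codehaus.jackson.JsonEncoding;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.ObjectMapper;

public class WriteObjectSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // The factory obtained from the mapper sets the mapper as the ObjectCodec,
    // so writeObject() can serialize arbitrary beans and maps.
    JsonGenerator gen = mapper.getJsonFactory()
        .createJsonGenerator(System.out, JsonEncoding.UTF8);
    gen.useDefaultPrettyPrinter();
    gen.writeObject(java.util.Collections.singletonMap("status", "ok"));
    gen.flush();
  }
}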
 
Example 5
Project: hadoop   File: Anonymizer.java
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();
  
  return outGen;
}
 
Example 6
Project: hadoop   File: TestHistograms.java
public static void main(String[] args) throws IOException {
  final Configuration conf = new Configuration();
  final FileSystem lfs = FileSystem.getLocal(conf);

  for (String arg : args) {
    Path filePath = new Path(arg).makeQualified(lfs);
    String fileName = filePath.getName();
    if (fileName.startsWith("input")) {
      LoggedDiscreteCDF newResult = histogramFileToCDF(filePath, lfs);
      String testName = fileName.substring("input".length());
      Path goldFilePath = new Path(filePath.getParent(), "gold"+testName);

      ObjectMapper mapper = new ObjectMapper();
      JsonFactory factory = mapper.getJsonFactory();
      FSDataOutputStream ostream = lfs.create(goldFilePath, true);
      JsonGenerator gen = factory.createJsonGenerator(ostream,
          JsonEncoding.UTF8);
      gen.useDefaultPrettyPrinter();
      
      gen.writeObject(newResult);
      
      gen.close();
    } else {
      System.err.println("Input file not started with \"input\". File "+fileName+" skipped.");
    }
  }
}
 
Example 7
Project: hadoop   File: Display.java
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
 
Example 8
Project: embulk-input-parquet_hadoop   File: CSVAsJSONIterator.java
@Override
public String next()
{
    CSVRecord record = inner.next();
    StringWriter json = new StringWriter();
    try {
        JsonGenerator gen = jsonFactory.createJsonGenerator(json);
        gen.writeStartObject();
        for (CSVHeaderMap.Entry entry : headerMap.entries()) {
            String name = entry.getName();
            String value = record.get(entry.getIndex());

            gen.writeFieldName(name);
            entry.getWriter().write(gen, value);
        }
        gen.writeEndObject();
        gen.close();
    }
    catch (IOException e) {
        throw new RuntimeException(e);
    }
    return json.toString();
}
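Unlike the stream-based examples above, this one uses the createJsonGenerator(Writer) overload, which takes no JsonEncoding because the Writer already deals in characters. A hedged sketch of the same build-a-JSON-string pattern (the field names are made up for illustration):

import java.io.StringWriter;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerator;

public class JsonStringSketch {
  public static void main(String[] args) throws Exception {
    StringWriter json = new StringWriter();
    JsonGenerator gen = new JsonFactory().createJsonGenerator(json);
    gen.writeStartObject();
    gen.writeStringField("name", "example");  // illustrative fields
    gen.writeNumberField("count", 1);
    gen.writeEndObject();
    gen.close();                              // flushes the content into the StringWriter
    System.out.println(json.toString());      // {"name":"example","count":1}
  }
}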
 
Example 9
Project: ditb   File: JSONBean.java
private static void writeAttribute(JsonGenerator jg, String attName, final String descriptionStr,
    Object value)
throws IOException {
  boolean description = false;
  if (descriptionStr != null && descriptionStr.length() > 0 && !attName.equals(descriptionStr)) {
    description = true;
    jg.writeFieldName(attName);
    jg.writeStartObject();
    jg.writeFieldName("description");
    jg.writeString(descriptionStr);
    jg.writeFieldName("value");
    writeObject(jg, description, value);
    jg.writeEndObject();
  } else {
    jg.writeFieldName(attName);
    writeObject(jg, description, value);
  }
}
 
Example 10
Project: aliyun-oss-hadoop-fs   File: StatePool.java
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
 
Example 11
Project: aliyun-oss-hadoop-fs   File: Anonymizer.java
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();
  
  return outGen;
}
 
Example 12
Project: aliyun-oss-hadoop-fs   File: Display.java
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
 
Example 13
Project: bdglue   File: JsonEncoder.java
/**
 * Populate transaction metadata into the record if requested.
 *
 * @param jg the handle to the JsonGenerator
 * @param op the database operation we are processing.
 * @throws IOException if a JSON encoding error occurs
 */
private void setBeforeInfo(JsonGenerator jg, DownstreamOperation op) throws IOException {
    LOG.debug("setBeforeInfo()");

    jg.writeFieldName("priorValues");
    jg.writeStartObject();
    // loop through operations with calls to appropriate jg.write() methods
    for (DownstreamColumnData col : op.getBefores()) {
        if (textOnly == true) {
            jg.writeStringField(col.getBDName(), col.asString());
        } else {
            // Encode the data appropriately: handle numbers as numbers, etc.
            int jdbcType;
            jdbcType = op.getTableMeta().getColumn(col.getOrigName()).getJdbcType();
            encodeColumn(col, jdbcType, jg);
        }
    }
    jg.writeEndObject();
}
 
Example 14
Project: big-c   File: StatePool.java
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
 
Example 15
Project: big-c   File: Anonymizer.java
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();
  
  return outGen;
}
 
Example 16
Project: big-c   File: Display.java
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
 
Example 17
Project: BaijiSerializer4J   File: RecordSchema.java
/**
 * Writes the record schema in JSON format
 *
 * @param writer   JSON writer
 * @param names    list of named schemas already written
 * @param encSpace enclosing namespace of the record schema
 */
@Override
protected void writeJsonFields(JsonGenerator writer, SchemaNames names, String encSpace)
        throws IOException {
    super.writeJsonFields(writer, names, encSpace);

    // we allow reading of empty fields, so writing records with empty fields is allowed as well
    if (_request) {
        writer.writeFieldName("request");
    } else {
        writer.writeFieldName("fields");
    }
    writer.writeStartArray();

    if (_fields != null && !_fields.isEmpty()) {
        for (Field field : this) {
            field.writeJson(writer, names, getNamespace()); // use the namespace of the record for the fields
        }
    }
    writer.writeEndArray();
}
 
Example 18
Project: hops   File: Anonymizer.java
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();
  
  return outGen;
}
 
Example 19
Project: apex-core   File: JsonStreamCodec.java
public JsonStreamCodec(Map<Class<?>, Class<? extends StringCodec<?>>> codecs)
{
  JacksonObjectMapperProvider jomp = new JacksonObjectMapperProvider();
  if (codecs != null) {
    for (Map.Entry<Class<?>, Class<? extends StringCodec<?>>> entry: codecs.entrySet()) {
      try {
        @SuppressWarnings("unchecked")
        final StringCodec<Object> codec = (StringCodec<Object>)entry.getValue().newInstance();
        jomp.addSerializer(new SerializerBase(entry.getKey())
        {
          @Override
          public void serialize(Object value, JsonGenerator jgen, SerializerProvider provider) throws IOException
          {
            jgen.writeString(codec.toString(value));
          }

        });
      } catch (Exception ex) {
        logger.error("Caught exception when instantiating codec for class {}", entry.getKey().getName(), ex);
      }
    }
  }
  mapper = jomp.getContext(null);
}
 
Example 20
Project: gravity   File: MapJsonSerializer.java
@Override
public void serialize(Map<String, Object> fields, JsonGenerator jgen,
		SerializerProvider provider) throws IOException,
		JsonProcessingException {
	jgen.writeStartObject();
	for (Entry<String, Object> entry : fields.entrySet()) {
		Object objectValue = entry.getValue();
		if (objectValue instanceof Date) {
			Date date = (Date) objectValue;
			SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ");
			simpleDateFormat.setTimeZone(TimeZone.getTimeZone("Pacific/Auckland"));
			String formattedDate = simpleDateFormat.format(date);
			jgen.writeObjectField(entry.getKey().toString(), formattedDate);
		} else {
			jgen.writeObjectField(entry.getKey().toString(), objectValue);
		}
	}
	jgen.writeEndObject();
}
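For this serializer to take effect it has to be attached to the mapper or to a property. One hedged way to do that in Jackson 1.x, assuming MapJsonSerializer extends JsonSerializer&lt;Map&lt;String, Object&gt;&gt; as the signature above suggests, is the @JsonSerialize annotation (the Document class and its field are hypothetical):

import java.util.Map;
import org.codehaus.jackson.map.annotate.JsonSerialize;

public class Document {
  // Hypothetical property serialized with the custom MapJsonSerializer above.
  @JsonSerialize(using = MapJsonSerializer.class)
  public Map<String, Object> fields;
}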
 
Example 21
Project: hadoop-2.6.0-cdh5.4.3   File: StatePool.java
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
 
Example 22
Project: hadoop-2.6.0-cdh5.4.3   File: Anonymizer.java
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();
  
  return outGen;
}
 
Example 23
Project: hadoop-EAR   File: SessionManager.java
/**
 * Used to write the state of the SessionManager instance to disk, when we
 * are persisting the state of the ClusterManager
 * @param jsonGenerator The JsonGenerator instance being used to write JSON
 *                      to disk
 * @throws IOException
 */
public void write(JsonGenerator jsonGenerator) throws IOException {
  jsonGenerator.writeStartObject();
  // retiredSessions and numRetiredSessions need not be persisted

  // sessionCounter can be set to 0, when the SessionManager is instantiated

  // sessions begins
  jsonGenerator.writeFieldName("sessions");
  jsonGenerator.writeStartObject();
  for (String sessionId : sessions.keySet()) {
    jsonGenerator.writeFieldName(sessionId);
    sessions.get(sessionId).write(jsonGenerator);
  }
  jsonGenerator.writeEndObject();
  // sessions ends

  jsonGenerator.writeNumberField("sessionCounter",
                                  sessionCounter.longValue());

  jsonGenerator.writeEndObject();

  // We can rebuild runnableSessions
  // No need to write startTime and numRetiredSessions
}
 
Example 24
Project: teiid-webui   File: DataVirtUploadServlet.java
/**
 * Writes the response values back to the http response.  This allows the calling code to
 * parse the response values for display to the user.
 *
 * @param responseMap the response params to write to the http response
 * @param response the http response
 * @throws IOException
 */
private static void writeToResponse(Map<String, String> responseMap, HttpServletResponse response) throws IOException {
       // Note: setting the content-type to text/html because otherwise IE prompts the user to download
       // the result rather than handing it off to the GWT form response handler.
       // See JIRA issue https://issues.jboss.org/browse/SRAMPUI-103
	response.setContentType("text/html; charset=UTF8"); //$NON-NLS-1$
       JsonFactory f = new JsonFactory();
       JsonGenerator g = f.createJsonGenerator(response.getOutputStream(), JsonEncoding.UTF8);
       g.useDefaultPrettyPrinter();
       g.writeStartObject();
       for (java.util.Map.Entry<String, String> entry : responseMap.entrySet()) {
           String key = entry.getKey();
           String val = entry.getValue();
           g.writeStringField(key, val);
       }
       g.writeEndObject();
       g.flush();
       g.close();
}
 
Example 25
Project: Multipath-Hedera-system-in-Floodlight-controller   File: CumulativeTimeBucketJSONSerializer.java
/**
  * Performs the serialization of a CumulativeTimeBucket object
  */
@Override
public void serialize(CumulativeTimeBucket ctb,
                JsonGenerator jGen,
                SerializerProvider serializer) 
                throws IOException, JsonProcessingException {
    jGen.writeStartObject();
    Timestamp ts = new Timestamp(ctb.getStartTimeNs()/1000000);
    jGen.writeStringField("start-time", ts.toString());
    jGen.writeStringField("current-time", 
      new Timestamp(System.currentTimeMillis()).toString());
    jGen.writeNumberField("total-packets", ctb.getTotalPktCnt());
    jGen.writeNumberField("average", ctb.getAverageProcTimeNs());
    jGen.writeNumberField("min", ctb.getMinTotalProcTimeNs());
    jGen.writeNumberField("max", ctb.getMaxTotalProcTimeNs());
    jGen.writeNumberField("std-dev", ctb.getTotalSigmaProcTimeNs());
    jGen.writeArrayFieldStart("modules");
    for (OneComponentTime oct : ctb.getModules()) {
        serializer.defaultSerializeValue(oct, jGen);
    }
    jGen.writeEndArray();
    jGen.writeEndObject();
}
 
Example 26
Project: hadoop-plus   File: Anonymizer.java
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();
  
  return outGen;
}
 
Example 27
Project: BaijiSerializer4J   File: NamedSchema.java
/**
 * Writes named schema in JSON format
 *
 * @param gen      JSON generator
 * @param names    list of named schemas already written
 * @param encSpace enclosing namespace of the schema
 */
@Override
protected void writeJsonFields(JsonGenerator gen, SchemaNames names, String encSpace) throws IOException {
    _schemaName.writeJson(gen, names);

    if (_doc != null && !_doc.isEmpty()) {
        gen.writeFieldName("doc");
        gen.writeString(_doc);
    }

    if (_aliases != null) {
        gen.writeFieldName("aliases");
        gen.writeStartArray();
        for (SchemaName name : _aliases) {
            String fullname = name.getSpace() != null ? name.getSpace() + "." + name.getName() : name.getName();
            gen.writeString(fullname);
        }
        gen.writeEndArray();
    }
}
 
Example 28
Project: BaijiSerializer4J   File: Schema.java
/**
 * Render this as <a href="http://json.org/">JSON</a>.
 *
 * @param pretty if true, pretty-print JSON.
 */
public String toString(boolean pretty) {
    try {
        StringWriter writer = new StringWriter();
        JsonGenerator gen = FACTORY.createJsonGenerator(writer);
        if (pretty) gen.useDefaultPrettyPrinter();

        if (this instanceof PrimitiveSchema || this instanceof UnionSchema) {
            gen.writeStartObject();
            gen.writeFieldName("type");
        }

        writeJson(gen, new SchemaNames(), null);

        if (this instanceof PrimitiveSchema || this instanceof UnionSchema) {
            gen.writeEndObject();
        }

        gen.flush();
        return writer.toString();
    } catch (IOException e) {
        throw new BaijiRuntimeException(e);
    }
}
 
Example 29
Project: tasman   File: UnixDockerClient.java
@Override
public JsonNode callUrl(String u) throws IOException {
	String url = u;
	HttpGet httpget = new HttpGet(url);
	logger.info("Opening url: "+url);
	
	CloseableHttpResponse response = httpclient.execute(httpget);
	ByteArrayOutputStream baos = new ByteArrayOutputStream();
	response.getEntity().writeTo(baos);
	ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
	ObjectMapper mapper = new ObjectMapper();
	mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
	JsonNode node = mapper.readTree(bais);
	response.close();
	return node;
}
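JsonGenerator.Feature.AUTO_CLOSE_TARGET controls whether Jackson closes the underlying stream when a generator (or a writeValue call) finishes; disabling it lets the caller keep using the stream afterwards. A minimal sketch of the effect on the write side (the objects written are placeholders):

import java.io.ByteArrayOutputStream;
import java.util.Collections;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.ObjectMapper;

public class AutoCloseTargetSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    mapper.writeValue(out, Collections.singletonMap("first", 1));   // stream stays open
    out.write('\n');
    mapper.writeValue(out, Collections.singletonMap("second", 2));  // still writable
    out.close();
    System.out.print(out.toString("UTF-8"));
  }
}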
 
Example 30
Project: BaijiSerializer4J   File: SpecificJsonWriter.java
/**
 * The only public write interface
 * @param schema the object schema
 * @param obj the object
 * @param os the final output stream
 */
public void write(Schema schema, T obj, OutputStream os) {
    if (schema instanceof RecordSchema) {
        if (os != null) {
            try {
                RecordSchema recordSchema = (RecordSchema) schema;
                JsonGenerator g = FACTORY.createJsonGenerator(os, JsonEncoding.UTF8);
                writeRecord(recordSchema, obj, g);
                g.flush();
            } catch (IOException e) {
                throw new BaijiRuntimeException("Serialize process failed.", e);
            }
        } else {
            throw new BaijiRuntimeException("Output stream can't be null");
        }
    } else {
        throw new BaijiRuntimeException("schema must be RecordSchema");
    }
}
 
Example 31
Project: Bottlenose   File: Schema.java
public String toString(boolean pretty) {
    try {
        StringWriter writer = new StringWriter();
        JsonGenerator gen = FACTORY.createJsonGenerator(writer);
        if (pretty)
            gen.useDefaultPrettyPrinter();

        if (this instanceof PrimitiveSchema || this instanceof UnionSchema) {
            gen.writeStartObject();
            gen.writeFieldName("type");
        }

        writeJSON(gen, new SchemaNames());

        if (this instanceof PrimitiveSchema || this instanceof UnionSchema) {
            gen.writeEndObject();
        }

        gen.flush();
        return writer.toString();
    } catch (IOException e) {
        throw new BaijiRuntimeException(e);
    }
}
 
Example 32
Project: Multipath-Hedera-system-in-Floodlight-controller   File: VirtualNetworkSerializer.java
@Override
public void serialize(VirtualNetwork vNet, JsonGenerator jGen,
        SerializerProvider serializer) throws IOException,
        JsonProcessingException {
    jGen.writeStartObject();
    
    jGen.writeStringField("name", vNet.name);
    jGen.writeStringField("guid", vNet.guid);
    jGen.writeStringField("gateway", vNet.gateway);

    jGen.writeArrayFieldStart("mac");
    Iterator<MACAddress> hit = vNet.hosts.iterator();
    while (hit.hasNext())
        jGen.writeString(hit.next().toString());
    jGen.writeEndArray();
    
    jGen.writeEndObject();
}
 
Example 33
Project: hadoop-oss   File: JMXJsonServlet.java
private void writeObject(JsonGenerator jg, Object value) throws IOException {
  if(value == null) {
    jg.writeNull();
  } else {
    Class<?> c = value.getClass();
    if (c.isArray()) {
      jg.writeStartArray();
      int len = Array.getLength(value);
      for (int j = 0; j < len; j++) {
        Object item = Array.get(value, j);
        writeObject(jg, item);
      }
      jg.writeEndArray();
    } else if(value instanceof Number) {
      Number n = (Number)value;
      jg.writeNumber(n.toString());
    } else if(value instanceof Boolean) {
      Boolean b = (Boolean)value;
      jg.writeBoolean(b);
    } else if(value instanceof CompositeData) {
      CompositeData cds = (CompositeData)value;
      CompositeType comp = cds.getCompositeType();
      Set<String> keys = comp.keySet();
      jg.writeStartObject();
      for(String key: keys) {
        writeAttribute(jg, key, cds.get(key));
      }
      jg.writeEndObject();
    } else if(value instanceof TabularData) {
      TabularData tds = (TabularData)value;
      jg.writeStartArray();
      for(Object entry : tds.values()) {
        writeObject(jg, entry);
      }
      jg.writeEndArray();
    } else {
      jg.writeString(value.toString());
    }
  }
}
 
Example 34
Project: hadoop-oss   File: Log4Json.java
/**
 * Build a JSON entry from the parameters. This is public for testing.
 *
 * @param writer destination
 * @param loggerName logger name
 * @param timeStamp time_t value
 * @param level level string
 * @param threadName name of the thread
 * @param message rendered message
 * @param ti nullable thrown information
 * @return the writer
 * @throws IOException on any problem
 */
public Writer toJson(final Writer writer,
                     final String loggerName,
                     final long timeStamp,
                     final String level,
                     final String threadName,
                     final String message,
                     final ThrowableInformation ti) throws IOException {
  JsonGenerator json = factory.createJsonGenerator(writer);
  json.writeStartObject();
  json.writeStringField(NAME, loggerName);
  json.writeNumberField(TIME, timeStamp);
  Date date = new Date(timeStamp);
  json.writeStringField(DATE, dateFormat.format(date));
  json.writeStringField(LEVEL, level);
  json.writeStringField(THREAD, threadName);
  json.writeStringField(MESSAGE, message);
  if (ti != null) {
    //there is some throwable info, but if the log event has been sent over the wire,
    //there may not be a throwable inside it, just a summary.
    Throwable thrown = ti.getThrowable();
    String eclass = (thrown != null) ?
        thrown.getClass().getName()
        : "";
    json.writeStringField(EXCEPTION_CLASS, eclass);
    String[] stackTrace = ti.getThrowableStrRep();
    json.writeArrayFieldStart(STACK);
    for (String row : stackTrace) {
      json.writeString(row);
    }
    json.writeEndArray();
  }
  json.writeEndObject();
  json.flush();
  json.close();
  return writer;
}
 
Example 35
Project: hadoop-oss   File: Configuration.java
/**
 *  Writes out all the parameters and their properties (final and resource) to
 *  the given {@link Writer}
 *  The format of the output would be 
 *  { "properties" : [ {key1,value1,key1.isFinal,key1.resource}, {key2,value2,
 *  key2.isFinal,key2.resource}... ] } 
 *  It does not output the parameters of the configuration object which is 
 *  loaded from an input stream.
 * @param out the Writer to write to
 * @throws IOException
 */
public static void dumpConfiguration(Configuration config,
    Writer out) throws IOException {
  JsonFactory dumpFactory = new JsonFactory();
  JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out);
  dumpGenerator.writeStartObject();
  dumpGenerator.writeFieldName("properties");
  dumpGenerator.writeStartArray();
  dumpGenerator.flush();
  synchronized (config) {
    for (Map.Entry<Object,Object> item: config.getProps().entrySet()) {
      dumpGenerator.writeStartObject();
      dumpGenerator.writeStringField("key", (String) item.getKey());
      dumpGenerator.writeStringField("value", 
                                     config.get((String) item.getKey()));
      dumpGenerator.writeBooleanField("isFinal",
                                      config.finalParameters.contains(item.getKey()));
      String[] resources = config.updatingResource.get(item.getKey());
      String resource = UNKNOWN_RESOURCE;
      if(resources != null && resources.length > 0) {
        resource = resources[0];
      }
      dumpGenerator.writeStringField("resource", resource);
      dumpGenerator.writeEndObject();
    }
  }
  dumpGenerator.writeEndArray();
  dumpGenerator.writeEndObject();
  dumpGenerator.flush();
}
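A hedged usage sketch for the dump method above (the class name is illustrative; it simply feeds a StringWriter to dumpConfiguration and prints the resulting JSON):

import java.io.StringWriter;
import org.apache.hadoop.conf.Configuration;

public class DumpConfigurationSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    StringWriter out = new StringWriter();
    // Produces { "properties" : [ {key, value, isFinal, resource}, ... ] }
    Configuration.dumpConfiguration(conf, out);
    System.out.println(out.toString());
  }
}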
 
Example 36
Project: jasvorno   File: JasvornoEncoder.java
private static JsonGenerator getJsonGenerator(OutputStream out) throws IOException {
  if (null == out) {
    throw new NullPointerException("OutputStream cannot be null");
  }
  JsonGenerator g = new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
  MinimalPrettyPrinter pp = new MinimalPrettyPrinter();
  pp.setRootValueSeparator(System.getProperty("line.separator"));
  g.setPrettyPrinter(pp);
  return g;
}
 
Example 37
Project: aliyun-maxcompute-data-collectors   File: JsonHelper.java
public static String beanToJson(Object bean) {
    StringWriter sw = new StringWriter();
    try {
        JsonGenerator jsongenerator = objmapper.getJsonFactory().createJsonGenerator(sw);
        objmapper.writeValue(jsongenerator, bean);
        jsongenerator.close();
    } catch (IOException e) {
        LOG.error("", e);
        return "";
    }
    return sw.toString();

}
 
Example 38
Project: hadoop   File: QueueManager.java
/***
 * Dumps the configuration of the queue hierarchy described by the given XML
 * file path. It is to be used directly ONLY FOR TESTING.
 * @param out the writer object to which dump is written to.
 * @param configFile the filename of xml file
 * @throws IOException
 */
static void dumpConfiguration(Writer out, String configFile,
    Configuration conf) throws IOException {
  if (conf != null && conf.get(DeprecatedQueueConfigurationParser.
      MAPRED_QUEUE_NAMES_KEY) != null) {
    return;
  }
  
  JsonFactory dumpFactory = new JsonFactory();
  JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out);
  QueueConfigurationParser parser;
  boolean aclsEnabled = false;
  if (conf != null) {
    aclsEnabled = conf.getBoolean(MRConfig.MR_ACLS_ENABLED, false);
  }
  if (configFile != null && !"".equals(configFile)) {
    parser = new QueueConfigurationParser(configFile, aclsEnabled);
  }
  else {
    parser = getQueueConfigurationParser(null, false, aclsEnabled);
  }
  dumpGenerator.writeStartObject();
  dumpGenerator.writeFieldName("queues");
  dumpGenerator.writeStartArray();
  dumpConfiguration(dumpGenerator,parser.getRoot().getChildren());
  dumpGenerator.writeEndArray();
  dumpGenerator.writeEndObject();
  dumpGenerator.flush();
}
 
Example 39
Project: hadoop   File: Anonymizer.java
private void anonymizeTrace() throws Exception {
  if (anonymizeTrace) {
    System.out.println("Anonymizing trace file: " + inputTracePath);
    JobTraceReader reader = null;
    JsonGenerator outGen = null;
    Configuration conf = getConf();
    
    try {
      // create a generator
      outGen = createJsonGenerator(conf, outputTracePath);

      // define the input trace reader
      reader = new JobTraceReader(inputTracePath, conf);
      
      // read the plain unanonymized logged job
      LoggedJob job = reader.getNext();
      
      while (job != null) {
        // write it via an anonymizing channel
        outGen.writeObject(job);
        // read the next job
        job = reader.getNext();
      }
      
      System.out.println("Anonymized trace file: " + outputTracePath);
    } finally {
      if (outGen != null) {
        outGen.close();
      }
      if (reader != null) {
        reader.close();
      }
    }
  }
}
 
Example 40
Project: hadoop   File: Anonymizer.java
private void anonymizeTopology() throws Exception {
  if (anonymizeTopology) {
    System.out.println("Anonymizing topology file: " + inputTopologyPath);
    ClusterTopologyReader reader = null;
    JsonGenerator outGen = null;
    Configuration conf = getConf();

    try {
      // create a generator
      outGen = createJsonGenerator(conf, outputTopologyPath);

      // define the input cluster topology reader
      reader = new ClusterTopologyReader(inputTopologyPath, conf);
      
      // read the plain unanonymized logged job
      LoggedNetworkTopology job = reader.get();
      
      // write it via an anonymizing channel
      outGen.writeObject(job);
      
      System.out.println("Anonymized topology file: " + outputTopologyPath);
    } finally {
      if (outGen != null) {
        outGen.close();
      }
    }
  }
}