com.fasterxml.jackson.databind.util.TokenBuffer Java Examples

The following examples show how to use com.fasterxml.jackson.databind.util.TokenBuffer. Each example notes the open-source project and license the code comes from.
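Before the project-specific examples, here is a minimal, self-contained sketch of the round-trip pattern most of them build on: serialize a value into a TokenBuffer, then replay the buffered token stream through asParser() to read it back. The class name and sample values are illustrative only and do not come from any of the projects below.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;

import java.io.IOException;
import java.util.Map;

public class TokenBufferRoundTrip {
    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();

        // Serialize into an in-memory token stream instead of JSON text.
        TokenBuffer buffer = new TokenBuffer(mapper, false); // false = no native type/object ids
        mapper.writeValue(buffer, Map.of("name", "alice", "age", 30));

        // Replay the buffered tokens through a parser to deserialize again.
        Map<?, ?> copy = mapper.readValue(buffer.asParser(), Map.class);
        System.out.println(copy); // e.g. {name=alice, age=30}
    }
}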
Example #1
Source File: AbstractJackson2Decoder.java    From spring-analysis-note with MIT License
@Override
public Flux<Object> decode(Publisher<DataBuffer> input, ResolvableType elementType,
		@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {

	Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(
			Flux.from(input), this.jsonFactory, getObjectMapper(), true);

	ObjectReader reader = getObjectReader(elementType, hints);

	return tokens.handle((tokenBuffer, sink) -> {
		try {
			Object value = reader.readValue(tokenBuffer.asParser(getObjectMapper()));
			logValue(value, hints);
			if (value != null) {
				sink.next(value);
			}
		}
		catch (IOException ex) {
			sink.error(processException(ex));
		}
	});
}
 
Example #2
Source File: BeanDeserializer.java    From lams with GNU General Public License v2.0
/**
 * Helper method called for rare case of pointing to {@link JsonToken#VALUE_NULL}
 * token. While this is most often an erroneous condition, there is one specific
 * case with XML handling where a polymorphic type with no properties is exposed
 * as such, and should be handled the same as an empty Object.
 *
 * @since 2.7
 */
protected Object deserializeFromNull(JsonParser p, DeserializationContext ctxt)
    throws IOException
{
    // 17-Dec-2015, tatu: Highly specialized case, mainly to support polymorphic
    //   "empty" POJOs deserialized from XML, where empty XML tag synthesizes a
    //   `VALUE_NULL` token.
    if (p.requiresCustomCodec()) { // not only XML module, but mostly it...
        @SuppressWarnings("resource")
        TokenBuffer tb = new TokenBuffer(p, ctxt);
        tb.writeEndObject();
        JsonParser p2 = tb.asParser(p);
        p2.nextToken(); // to point to END_OBJECT
        // note: don't have ObjectId to consider at this point, so:
        Object ob = _vanillaProcessing ? vanillaDeserialize(p2, ctxt, JsonToken.END_OBJECT)
                : deserializeFromObject(p2, ctxt);
        p2.close();
        return ob;
    }
    return ctxt.handleUnexpectedToken(handledType(), p);
}
 
Example #3
Source File: StdKeyDeserializer.java    From lams with GNU General Public License v2.0
@SuppressWarnings("resource")
@Override
public final Object deserializeKey(String key, DeserializationContext ctxt)
    throws IOException
{
    if (key == null) { // is this even a legal call?
        return null;
    }
    TokenBuffer tb = new TokenBuffer(ctxt.getParser(), ctxt);
    tb.writeString(key);
    try {
        // Ugh... should not have to give a parser which may or may not be the correct one...
        JsonParser p = tb.asParser();
        p.nextToken();
        Object result = _delegate.deserialize(p, ctxt);
        if (result != null) {
            return result;
        }
        return ctxt.handleWeirdKey(_keyClass, key, "not a valid representation");
    } catch (Exception re) {
        return ctxt.handleWeirdKey(_keyClass, key, "not a valid representation: %s", re.getMessage());
    }
}
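The writeString-then-asParser() trick used above also works on its own for running a plain string through any value deserializer. A hedged, self-contained sketch; the UUID literal is illustrative:

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;

import java.io.IOException;
import java.util.UUID;

public class KeyAsParser {
    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();

        // Wrap a bare string in a TokenBuffer so it can be fed to a parser-based deserializer.
        TokenBuffer tb = new TokenBuffer(mapper, false);
        tb.writeString("123e4567-e89b-12d3-a456-426614174000");

        JsonParser p = tb.asParser();
        p.nextToken(); // position on the buffered VALUE_STRING
        UUID id = mapper.readValue(p, UUID.class);
        System.out.println(id);
        p.close();
    }
}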
 
Example #4
Source File: GuidedJsonDecoder.java    From gcp-ingestion with Mozilla Public License 2.0
/**
 * Read a string from the current location in parser.
 *
 * <p>This method differs from the original JsonDecoder by serializing all
 * structures captured by the current token into a JSON string. This enables
 * consistent behavior for handling variant types (e.g. a field that can be either a
 * boolean or a string) and for under-specified schemas.
 *
 * <p>This encoding is lossy because JSON strings are conflated with standard
 * strings. Consider the case where a number is decoded into a string. To
 * convert this Avro file back into the original JSON document, the encoder
 * must parse all strings as JSON and inline them into the tree. Now, if the
 * original JSON represents a JSON object as a string (e.g. `{"payload":
 * "{\"foo\":\"bar\"}"}`), then the encoder will generate a new object that is
 * different from the original.
 *
 * <p>There are a few ways to avoid this if it is undesirable. One way is to use
 * a binary encoding for the JSON data such as BSON or base64. A second is to
 * normalize documents to avoid nested JSON encodings and to specify a schema
 * explicitly to guide the proper typing.
 */
@Override
public String readString() throws IOException {
  parser.advance(Symbol.STRING);
  if (parser.topSymbol() == Symbol.MAP_KEY_MARKER) {
    parser.advance(Symbol.MAP_KEY_MARKER);
    assertCurrentToken(JsonToken.FIELD_NAME, "map-key");
  }

  String result = null;
  if (in.getCurrentToken() == JsonToken.VALUE_STRING
      || in.getCurrentToken() == JsonToken.FIELD_NAME) {
    result = in.getValueAsString();
  } else {
    // Does this create excessive garbage collection?
    TokenBuffer buffer = new TokenBuffer(in);
    buffer.copyCurrentStructure(in);
    result = mapper.readTree(buffer.asParser()).toString();
    buffer.close();
  }
  in.nextToken();
  return result;
}
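The capture-and-reserialize step described in the Javadoc above can be reproduced in isolation: copy the current subtree into a TokenBuffer, read it back as a tree, and render it as a JSON string. A minimal sketch, assuming a stock ObjectMapper and an illustrative input document:

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;

import java.io.IOException;

public class SubtreeToString {
    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        try (JsonParser in = mapper.getFactory().createParser("{\"payload\":{\"foo\":\"bar\"}}")) {
            in.nextToken(); // START_OBJECT
            in.nextToken(); // FIELD_NAME "payload"
            in.nextToken(); // START_OBJECT of the nested value

            // Capture the nested object and re-serialize it as a JSON string,
            // mirroring what readString() does for non-scalar values.
            TokenBuffer buffer = new TokenBuffer(in);
            buffer.copyCurrentStructure(in);
            String asText = mapper.readTree(buffer.asParser()).toString();
            buffer.close();

            System.out.println(asText); // {"foo":"bar"}
        }
    }
}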
 
Example #5
Source File: Jackson2TokenizerTests.java    From java-technology-stack with MIT License
private void testTokenize(List<String> source, List<String> expected, boolean tokenizeArrayElements) {

	Flux<TokenBuffer> tokenBufferFlux = Jackson2Tokenizer.tokenize(
			Flux.fromIterable(source).map(this::stringBuffer),
			this.jsonFactory,
			tokenizeArrayElements);

	Flux<String> result = tokenBufferFlux
			.map(tokenBuffer -> {
				try {
					TreeNode root = this.objectMapper.readTree(tokenBuffer.asParser());
					return this.objectMapper.writeValueAsString(root);
				}
				catch (IOException ex) {
					throw new UncheckedIOException(ex);
				}
			});

	StepVerifier.FirstStep<String> builder = StepVerifier.create(result);
	expected.forEach(s -> builder.assertNext(new JSONAssertConsumer(s)));
	builder.verifyComplete();
}
 
Example #6
Source File: ExternalTypeHandler.java    From lams with GNU General Public License v2.0
@SuppressWarnings("resource")
protected final Object _deserialize(JsonParser p, DeserializationContext ctxt,
        int index, String typeId) throws IOException
{
    JsonParser p2 = _tokens[index].asParser(p);
    JsonToken t = p2.nextToken();
    // 29-Sep-2015, tatu: As per [databind#942], nulls need special support
    if (t == JsonToken.VALUE_NULL) {
        return null;
    }
    TokenBuffer merged = new TokenBuffer(p, ctxt);
    merged.writeStartArray();
    merged.writeString(typeId);
    merged.copyCurrentStructure(p2);
    merged.writeEndArray();

    // needs to point to START_OBJECT (or whatever first token is)
    JsonParser mp = merged.asParser(p);
    mp.nextToken();
    return _properties[index].getProperty().deserialize(mp, ctxt);
}
 
Example #7
Source File: ExternalTypeHandler.java    From lams with GNU General Public License v2.0
@SuppressWarnings("resource")
protected final void _deserializeAndSet(JsonParser p, DeserializationContext ctxt,
        Object bean, int index, String typeId) throws IOException
{
    /* Ok: time to mix type id, value; and we will actually use "wrapper-array"
     * style to ensure we can handle all kinds of JSON constructs.
     */
    JsonParser p2 = _tokens[index].asParser(p);
    JsonToken t = p2.nextToken();
    // 29-Sep-2015, tatu: As per [databind#942], nulls need special support
    if (t == JsonToken.VALUE_NULL) {
        _properties[index].getProperty().set(bean, null);
        return;
    }
    TokenBuffer merged = new TokenBuffer(p, ctxt);
    merged.writeStartArray();
    merged.writeString(typeId);

    merged.copyCurrentStructure(p2);
    merged.writeEndArray();
    // needs to point to START_OBJECT (or whatever first token is)
    JsonParser mp = merged.asParser(p);
    mp.nextToken();
    _properties[index].getProperty().deserializeAndSet(mp, ctxt, bean);
}
 
Example #8
Source File: Jackson2Tokenizer.java    From java-technology-stack with MIT License
private Flux<TokenBuffer> parseTokenBufferFlux() throws IOException {
	List<TokenBuffer> result = new ArrayList<>();

	while (true) {
		JsonToken token = this.parser.nextToken();
		// SPR-16151: Smile data format uses null to separate documents
		if ((token == JsonToken.NOT_AVAILABLE) ||
				(token == null && (token = this.parser.nextToken()) == null)) {
			break;
		}
		updateDepth(token);

		if (!this.tokenizeArrayElements) {
			processTokenNormal(token, result);
		}
		else {
			processTokenArray(token, result);
		}
	}
	return Flux.fromIterable(result);
}
 
Example #9
Source File: ObjectMapper.java    From lams with GNU General Public License v2.0
@SuppressWarnings({ "unchecked", "resource" })
public <T extends JsonNode> T valueToTree(Object fromValue)
    throws IllegalArgumentException
{
    if (fromValue == null) return null;
    TokenBuffer buf = new TokenBuffer(this, false);
    if (isEnabled(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS)) {
        buf = buf.forceUseOfBigDecimal(true);
    }
    JsonNode result;
    try {
        writeValue(buf, fromValue);
        JsonParser p = buf.asParser();
        result = readTree(p);
        p.close();
    } catch (IOException e) { // should not occur, no real i/o...
        throw new IllegalArgumentException(e.getMessage(), e);
    }
    return (T) result;
}
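A quick usage sketch of the method above; the input map is illustrative:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Map;

public class ValueToTreeUsage {
    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode node = mapper.valueToTree(Map.of("count", 1));
        System.out.println(node); // {"count":1}
    }
}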
 
Example #10
Source File: ValuesRel.java    From dremio-oss with Apache License 2.0
private static JsonNode convertToJsonNode(RelDataType rowType, ImmutableList<ImmutableList<RexLiteral>> tuples) throws IOException{
  TokenBuffer out = new TokenBuffer(MAPPER.getFactory().getCodec(), false);
  JsonOutput json = new ExtendedJsonOutput(out);
  json.writeStartArray();
  String[] fields = rowType.getFieldNames().toArray(new String[rowType.getFieldCount()]);

  for(List<RexLiteral> row : tuples){
    json.writeStartObject();
    int i = 0;
    for(RexLiteral field : row){
      json.writeFieldName(fields[i]);
      writeLiteral(field, json);
      i++;
    }
    json.writeEndObject();
  }
  json.writeEndArray();
  json.flush();
  return out.asParser().readValueAsTree();
}
 
Example #11
Source File: ConvertCountToDirectScan.java    From dremio-oss with Apache License 2.0
private JsonNode getResultsNode(long count) {
  try{
    TokenBuffer out = new TokenBuffer(MAPPER.getFactory().getCodec(), false);
    JsonOutput json = new ExtendedJsonOutput(out);
    json.writeStartArray();
    json.writeStartObject();
    json.writeFieldName("count");
    json.writeBigInt(count);
    json.writeEndObject();
    json.writeEndArray();
    json.flush();
    return out.asParser().readValueAsTree();
  }catch(IOException ex){
    throw Throwables.propagate(ex);
  }
}
 
Example #12
Source File: JsonBinaryType.java    From dhis2-core with BSD 3-Clause "New" or "Revised" License
@Override
public Object deepCopy( Object value ) throws HibernateException
{
    if ( value == null )
    {
        return null;
    }

    final TokenBuffer tb = new TokenBuffer( resultingMapper, false );
    try
    {
        writeValue( tb, value );
        return readValue( tb.asParser() );
    }
    catch ( IOException e )
    {
        throw new HibernateException( "Could not deep copy JSONB object.", e );
    }
}
 
Example #13
Source File: Jackson2TokenizerTests.java    From spring-analysis-note with MIT License
private void testTokenize(List<String> source, List<String> expected, boolean tokenizeArrayElements) {
	Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(
			Flux.fromIterable(source).map(this::stringBuffer),
			this.jsonFactory, this.objectMapper, tokenizeArrayElements);

	Flux<String> result = tokens
			.map(tokenBuffer -> {
				try {
					TreeNode root = this.objectMapper.readTree(tokenBuffer.asParser());
					return this.objectMapper.writeValueAsString(root);
				}
				catch (IOException ex) {
					throw new UncheckedIOException(ex);
				}
			});

	StepVerifier.FirstStep<String> builder = StepVerifier.create(result);
	expected.forEach(s -> builder.assertNext(new JSONAssertConsumer(s)));
	builder.verifyComplete();
}
 
Example #14
Source File: JacksonMarshaller.java    From nexus-public with Eclipse Public License 1.0
@Override
public <T> T unmarshall(final Object marshalled, final Class<T> type) throws Exception {
  checkNotNull(marshalled);
  checkState(marshalled instanceof Map, "Marshalled data must be a Map; found: %s", marshalled.getClass());

  // FIXME: This allows the top-level object to be created, but if any child objects of it are missing
  // FIXME: ... a no-arg CTOR then Jackson will fail to construct them.
  // FIXME: Is there any way to configure the basic instance creation for Jackson?
  Object value = instanceCreator.newInstance(type);

  // performs same basic logic as ObjectMapper.convertValue(Object, Class) helper
  ObjectReader reader = objectMapper.readerForUpdating(value);
  TokenBuffer buff = new TokenBuffer(objectMapper, false);
  objectMapper.writeValue(buff, marshalled);
  reader.readValue(buff.asParser());

  return type.cast(value);
}
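The same write-into-a-buffer-then-update pattern can be shown with only a stock ObjectMapper; the Settings bean and field values below are hypothetical, not part of the Nexus code:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;

import java.io.IOException;
import java.util.Map;

public class UpdateExistingInstance {
    // Hypothetical mutable bean standing in for the pre-created instance.
    public static class Settings {
        public String name;
        public int timeout = 30;
    }

    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        Settings settings = new Settings();

        // Write the source map into a TokenBuffer, then replay it onto the existing object.
        TokenBuffer buff = new TokenBuffer(mapper, false);
        mapper.writeValue(buff, Map.of("name", "nexus"));
        mapper.readerForUpdating(settings).readValue(buff.asParser());

        System.out.println(settings.name + " / " + settings.timeout); // nexus / 30
    }
}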
 
Example #15
Source File: Jackson2Tokenizer.java    From spring-analysis-note with MIT License
/**
 * Tokenize the given {@code Flux<DataBuffer>} into {@code Flux<TokenBuffer>}.
 * @param dataBuffers the source data buffers
 * @param jsonFactory the factory to use
 * @param objectMapper the current mapper instance
 * @param tokenizeArrayElements if {@code true} and the "top level" JSON object is
 * an array, each element is returned individually immediately after it is received
 * @return the resulting token buffers
 */
public static Flux<TokenBuffer> tokenize(Flux<DataBuffer> dataBuffers, JsonFactory jsonFactory,
		ObjectMapper objectMapper, boolean tokenizeArrayElements) {

	try {
		JsonParser parser = jsonFactory.createNonBlockingByteArrayParser();
		DeserializationContext context = objectMapper.getDeserializationContext();
		if (context instanceof DefaultDeserializationContext) {
			context = ((DefaultDeserializationContext) context).createInstance(
					objectMapper.getDeserializationConfig(), parser, objectMapper.getInjectableValues());
		}
		Jackson2Tokenizer tokenizer = new Jackson2Tokenizer(parser, context, tokenizeArrayElements);
		return dataBuffers.concatMapIterable(tokenizer::tokenize).concatWith(tokenizer.endOfInput());
	}
	catch (IOException ex) {
		return Flux.error(ex);
	}
}
 
Example #16
Source File: Jackson2Tokenizer.java    From spring-analysis-note with MIT License
private List<TokenBuffer> parseTokenBufferFlux() throws IOException {
	List<TokenBuffer> result = new ArrayList<>();

	while (true) {
		JsonToken token = this.parser.nextToken();
		// SPR-16151: Smile data format uses null to separate documents
		if (token == JsonToken.NOT_AVAILABLE ||
				(token == null && (token = this.parser.nextToken()) == null)) {
			break;
		}
		updateDepth(token);
		if (!this.tokenizeArrayElements) {
			processTokenNormal(token, result);
		}
		else {
			processTokenArray(token, result);
		}
	}
	return result;
}
 
Example #17
Source File: DrillValuesRelBase.java    From Bats with Apache License 2.0
private static JsonNode convertToJsonNode(RelDataType rowType, List<? extends List<RexLiteral>> tuples) throws IOException {
  TokenBuffer out = new TokenBuffer(MAPPER.getFactory().getCodec(), false);
  JsonOutput json = new ExtendedJsonOutput(out);
  json.writeStartArray();
  String[] fields = rowType.getFieldNames().toArray(new String[rowType.getFieldCount()]);

  for (List<RexLiteral> row : tuples) {
    json.writeStartObject();
    int i = 0;
    for (RexLiteral field : row) {
      json.writeFieldName(fields[i]);
      writeLiteral(field, json);
      i++;
    }
    json.writeEndObject();
  }
  json.writeEndArray();
  json.flush();
  return out.asParser().readValueAsTree();
}
 
Example #18
Source File: OrientHttpClientConfigurationEntityAdapter.java    From nexus-public with Eclipse Public License 1.0
@Override
protected void readFields(final ODocument document, final OrientHttpClientConfiguration entity) throws Exception {
  ObjectReader reader = objectMapper.readerForUpdating(entity);
  TokenBuffer buff = new TokenBuffer(objectMapper, false);
  Map<String, Object> fields = document.toMap();

  // strip out id/class synthetics
  fields.remove("@rid");
  fields.remove("@class");

  log.trace("Reading fields: {}", fields);
  objectMapper.writeValue(buff, fields);
  reader.readValue(buff.asParser());
}
 
Example #19
Source File: UnwrappedPropertyHandler.java    From lams with GNU General Public License v2.0
@SuppressWarnings("resource")
public Object processUnwrapped(JsonParser originalParser, DeserializationContext ctxt,
        Object bean, TokenBuffer buffered)
    throws IOException
{
    for (int i = 0, len = _properties.size(); i < len; ++i) {
        SettableBeanProperty prop = _properties.get(i);
        JsonParser p = buffered.asParser();
        p.nextToken();
        prop.deserializeAndSet(p, ctxt, bean);
    }
    return bean;
}
 
Example #20
Source File: CopyUtil.java    From blynk-server with GNU General Public License v3.0
public static DashBoard deepCopy(DashBoard dash) {
    if (dash == null) {
        return null;
    }
    try {
        TokenBuffer tb = new TokenBuffer(JsonParser.MAPPER, false);
        JsonParser.MAPPER.writeValue(tb, dash);
        return JsonParser.MAPPER.readValue(tb.asParser(), DashBoard.class);
    } catch (Exception e) {
        log.error("Error during deep copy of dashboard. Reason : {}", e.getMessage());
        log.debug(e);
    }
    return null;
}
 
Example #21
Source File: ExternalTypeHandler.java    From lams with GNU General Public License v2.0
protected ExternalTypeHandler(ExternalTypeHandler h)
{
    _beanType = h._beanType;
    _properties = h._properties;
    _nameToPropertyIndex = h._nameToPropertyIndex;
    int len = _properties.length;
    _typeIds = new String[len];
    _tokens = new TokenBuffer[len];
}
 
Example #22
Source File: CopyUtil.java    From blynk-server with GNU General Public License v3.0
public static Profile deepCopy(Profile profile) {
    if (profile == null) {
        return null;
    }
    try {
        TokenBuffer tb = new TokenBuffer(JsonParser.MAPPER, false);
        JsonParser.MAPPER.writeValue(tb, profile);
        return JsonParser.MAPPER.readValue(tb.asParser(), Profile.class);
    } catch (Exception e) {
        log.error("Error during deep copy of profile. Reason : {}", e.getMessage());
        log.debug(e);
    }
    return null;
}
 
Example #23
Source File: ExternalTypeHandler.java    From lams with GNU General Public License v2.0
protected ExternalTypeHandler(JavaType beanType,
        ExtTypedProperty[] properties,
        Map<String, Object> nameToPropertyIndex,
        String[] typeIds, TokenBuffer[] tokens)
{
    _beanType = beanType;
    _properties = properties;
    _nameToPropertyIndex = nameToPropertyIndex;
    _typeIds = typeIds;
    _tokens = tokens;
}
 
Example #24
Source File: BuilderBasedDeserializer.java    From lams with GNU General Public License v2.0
protected Object deserializeWithUnwrapped(JsonParser p,
        DeserializationContext ctxt, Object builder, TokenBuffer tokens)
    throws IOException
{
    final Class<?> activeView = _needViewProcesing ? ctxt.getActiveView() : null;
    for (JsonToken t = p.getCurrentToken(); t == JsonToken.FIELD_NAME; t = p.nextToken()) {
        String propName = p.getCurrentName();
        SettableBeanProperty prop = _beanProperties.find(propName);
        p.nextToken();
        if (prop != null) { // normal case
            if (activeView != null && !prop.visibleInView(activeView)) {
                p.skipChildren();
                continue;
            }
            try {
                builder = prop.deserializeSetAndReturn(p, ctxt, builder);
            } catch (Exception e) {
                wrapAndThrow(e, builder, propName, ctxt);
            }
            continue;
        }
        if (_ignorableProps != null && _ignorableProps.contains(propName)) {
            handleIgnoredProperty(p, ctxt, builder, propName);
            continue;
        }
        // but... others should be passed to unwrapped property deserializers
        tokens.writeFieldName(propName);
        tokens.copyCurrentStructure(p);
        // how about any setter? We'll get copies but...
        if (_anySetter != null) {
            _anySetter.deserializeAndSet(p, ctxt, builder, propName);
        }
    }
    tokens.writeEndObject();
    return _unwrappedPropertyHandler.processUnwrapped(p, ctxt, builder, tokens);
}
 
Example #25
Source File: JsonParserReader.java    From immutables with Apache License 2.0
/**
 * Reads the current value, including objects and arrays, as an efficient token buffer.
 * Use of Jackson's own mechanisms is important to preserve custom elements
 * such as special embedded objects in BSON or other data formats.
 * @return {@link TokenBuffer}
 * @throws IOException if an error occurred
 */
public final TokenBuffer nextTokenBuffer() throws IOException {
  TokenBuffer buffer = new TokenBuffer(parser);
  // if the token was consumed but the underlying parser is still sitting on it, we move forward
  requirePeek();
  buffer.copyCurrentStructure(parser);
  // when we return to reading from the reader, the peeked state is cleared so nextToken is read afterwards
  clearPeek();
  return buffer;
}
 
Example #26
Source File: UUIDSerializer.java    From lams with GNU General Public License v2.0
@Override
public void serialize(UUID value, JsonGenerator gen, SerializerProvider provider)
    throws IOException
{
    // First: perhaps we could serialize it as raw binary data?
    if (gen.canWriteBinaryNatively()) {
        /* 07-Dec-2013, tatu: One nasty case; that of TokenBuffer. While it can
         *   technically retain binary data, we do not want to use binary
         *   with it, as that results in UUIDs getting converted to Base64 for
         *   most conversions.
         */
        if (!(gen instanceof TokenBuffer)) {
            gen.writeBinary(_asBytes(value));
            return;
        }
    }
    
    // UUID.toString() works ok functionally, but we can make it go much faster
    // (by 4x with micro-benchmark)

    final char[] ch = new char[36];
    final long msb = value.getMostSignificantBits();
    _appendInt((int) (msb >> 32), ch, 0);
    ch[8] = '-';
    int i = (int) msb;
    _appendShort(i >>> 16, ch, 9);
    ch[13] = '-';
    _appendShort(i, ch, 14);
    ch[18] = '-';

    final long lsb = value.getLeastSignificantBits();
    _appendShort((int) (lsb >>> 48), ch, 19);
    ch[23] = '-';
    _appendShort((int) (lsb >>> 32), ch, 24);
    _appendInt((int) lsb, ch, 28);

    gen.writeString(ch, 0, 36);
}
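The TokenBuffer special case called out in the comment above can be observed directly: with a stock ObjectMapper (which uses this serializer for java.util.UUID), writing a UUID into a TokenBuffer yields a VALUE_STRING token rather than embedded binary. A small sketch:

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;

import java.io.IOException;
import java.util.UUID;

public class UuidIntoTokenBuffer {
    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        TokenBuffer buf = new TokenBuffer(mapper, false);
        mapper.writeValue(buf, UUID.randomUUID());

        JsonParser p = buf.asParser();
        JsonToken t = p.nextToken();
        // The instanceof check above steers TokenBuffer output to text,
        // not a VALUE_EMBEDDED_OBJECT holding raw bytes.
        System.out.println(t == JsonToken.VALUE_STRING); // true
        p.close();
    }
}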
 
Example #27
Source File: Jackson2Tokenizer.java    From java-technology-stack with MIT License
private void processTokenArray(JsonToken token, List<TokenBuffer> result) throws IOException {
	if (!isTopLevelArrayToken(token)) {
		this.tokenBuffer.copyCurrentEvent(this.parser);
	}

	if (this.objectDepth == 0 &&
			(this.arrayDepth == 0 || this.arrayDepth == 1) &&
			(token == JsonToken.END_OBJECT || token.isScalarValue())) {
		result.add(this.tokenBuffer);
		this.tokenBuffer = new TokenBuffer(this.parser);
	}
}
 
Example #28
Source File: Jackson2Tokenizer.java    From spring-analysis-note with MIT License
private Jackson2Tokenizer(
		JsonParser parser, DeserializationContext deserializationContext, boolean tokenizeArrayElements) {

	this.parser = parser;
	this.deserializationContext = deserializationContext;
	this.tokenizeArrayElements = tokenizeArrayElements;
	this.tokenBuffer = new TokenBuffer(parser, deserializationContext);
	this.inputFeeder = (ByteArrayFeeder) this.parser.getNonBlockingInputFeeder();
}
 
Example #29
Source File: Jackson2Tokenizer.java    From spring-analysis-note with MIT License
private void processTokenNormal(JsonToken token, List<TokenBuffer> result) throws IOException {
	this.tokenBuffer.copyCurrentEvent(this.parser);

	if ((token.isStructEnd() || token.isScalarValue()) && this.objectDepth == 0 && this.arrayDepth == 0) {
		result.add(this.tokenBuffer);
		this.tokenBuffer = new TokenBuffer(this.parser, this.deserializationContext);
	}

}
 
Example #30
Source File: Jackson2Tokenizer.java    From spring-analysis-note with MIT License
private void processTokenArray(JsonToken token, List<TokenBuffer> result) throws IOException {
	if (!isTopLevelArrayToken(token)) {
		this.tokenBuffer.copyCurrentEvent(this.parser);
	}

	if (this.objectDepth == 0 && (this.arrayDepth == 0 || this.arrayDepth == 1) &&
			(token == JsonToken.END_OBJECT || token.isScalarValue())) {
		result.add(this.tokenBuffer);
		this.tokenBuffer = new TokenBuffer(this.parser, this.deserializationContext);
	}
}