Java Code Examples for com.fasterxml.jackson.databind.DeserializationContext#findInjectableValue()

The following examples show how to use com.fasterxml.jackson.databind.DeserializationContext#findInjectableValue(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: CaseModule.java    From testrail-api-java-client with MIT License 6 votes vote down vote up
@Override
public Case deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
    // Let the default deserializer build the Case, then post-process its custom fields.
    Case testCase = (Case) defaultDeserializer.deserialize(jsonParser, deserializationContext);

    // The configured case-field list is supplied out-of-band, registered as an
    // injectable value under the key Case.class.toString().
    List<CaseField> configuredFields = (List<CaseField>) deserializationContext.findInjectableValue(Case.class.toString(), null, null);
    Function<CaseField, String> byName = new Function<CaseField, String>() {
        @Override
        public String apply(final CaseField caseField) {
            return caseField.getName();
        }
    };
    // uniqueIndex throws IllegalArgumentException on duplicate field names.
    Map<String, CaseField> fieldsByName = Maps.uniqueIndex(configuredFields, byName);

    // Convert each raw custom-field value to the concrete type implied by its field config.
    ObjectMapper codec = (ObjectMapper) jsonParser.getCodec();
    Map<String, Object> convertedFields = new HashMap<>(testCase.getCustomFields().size());
    for (Map.Entry<String, Object> entry : testCase.getCustomFields().entrySet()) {
        String fieldName = entry.getKey();
        checkArgument(fieldsByName.containsKey(fieldName), "Case field list configuration is possibly outdated since it does not contain custom field: " + fieldName);
        convertedFields.put(fieldName, codec.convertValue(entry.getValue(), Field.Type.getType(fieldsByName.get(fieldName).getTypeId()).getTypeReference()));
    }
    testCase.setCustomFields(convertedFields);
    return testCase;
}
 
Example 2
Source File: ResultModule.java    From testrail-api-java-client with MIT License 6 votes vote down vote up
@Override
public Result deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
    // Let the default deserializer build the Result, then post-process its custom fields.
    Result result = (Result) defaultDeserializer.deserialize(jsonParser, deserializationContext);

    // The configured result-field list is supplied out-of-band, registered as an
    // injectable value under the key Result.class.toString().
    List<ResultField> configuredFields = (List<ResultField>) deserializationContext.findInjectableValue(Result.class.toString(), null, null);
    Function<ResultField, String> byName = new Function<ResultField, String>() {
        @Override
        public String apply(final ResultField resultField) {
            return resultField.getName();
        }
    };
    // uniqueIndex throws IllegalArgumentException on duplicate field names.
    Map<String, ResultField> fieldsByName = Maps.uniqueIndex(configuredFields, byName);

    // Convert each raw custom-field value to the concrete type implied by its field config.
    ObjectMapper codec = (ObjectMapper) jsonParser.getCodec();
    Map<String, Object> convertedFields = new HashMap<>(result.getCustomFields().size());
    for (Map.Entry<String, Object> entry : result.getCustomFields().entrySet()) {
        String fieldName = entry.getKey();
        checkArgument(fieldsByName.containsKey(fieldName), "Result field list configuration is possibly outdated since it does not contain custom field: " + fieldName);
        convertedFields.put(fieldName, codec.convertValue(entry.getValue(), Field.Type.getType(fieldsByName.get(fieldName).getTypeId()).getTypeReference()));
    }
    result.setCustomFields(convertedFields);
    return result;
}
 
Example 3
Source File: HiveProxyingSubScanDeserializer.java    From dremio-oss with Apache License 2.0 5 votes vote down vote up
@Override
public HiveProxyingSubScan deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
  final JsonNode root = jsonParser.getCodec().readTree(jsonParser);

  // Resolve the storage plugin by name; the CatalogService is injected under its class name.
  final String sourceName = root.get("pluginName").asText();
  final CatalogService catalog = (CatalogService) deserializationContext
    .findInjectableValue(CatalogService.class.getName(), null, null);
  final StoragePluginCreator.PF4JStoragePlugin pf4jPlugin = catalog.getSource(sourceName);

  // Build a bean deserializer for the plugin-specific sub-scan class and resolve it
  // (resolution wires up any cyclic/contextual dependencies before first use).
  final Class<? extends HiveProxiedSubScan> concreteClass = pf4jPlugin.getSubScanClass();
  final JavaType concreteType = deserializationContext.getTypeFactory().constructType(concreteClass);
  final BasicBeanDescription beanDescription = deserializationContext.getConfig().introspect(concreteType);
  final JsonDeserializer<Object> delegate = deserializationContext.getFactory().createBeanDeserializer(
    deserializationContext, concreteType, beanDescription);
  if (delegate instanceof ResolvableDeserializer) {
    ((ResolvableDeserializer) delegate).resolve(deserializationContext);
  }

  // Replay the buffered "wrappedHiveScan" subtree through the delegate deserializer.
  final JsonParser wrappedParser = jsonParser.getCodec().treeAsTokens(root.get("wrappedHiveScan"));
  deserializationContext.getConfig().initialize(wrappedParser);

  // A freshly created tree parser is not yet positioned on a token; advance it once.
  if (wrappedParser.getCurrentToken() == null) {
    wrappedParser.nextToken();
  }

  final HiveProxiedSubScan innerScan = (HiveProxiedSubScan) delegate.deserialize(wrappedParser, deserializationContext);
  return new HiveProxyingSubScan(sourceName, innerScan);
}
 
Example 4
Source File: HiveProxyingOrcScanFilterDeserializer.java    From dremio-oss with Apache License 2.0 5 votes vote down vote up
@Override
public HiveProxyingOrcScanFilter deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
  // TODO: Optimize performance as described in https://dremio.atlassian.net/browse/DX-17732
  final JsonNode root = jsonParser.getCodec().readTree(jsonParser);

  // Resolve the storage plugin by name; the CatalogService is injected under its class name.
  final String sourceName = root.get(HiveProxyingOrcScanFilter.JSON_PROP_PLUGINNAME).asText();
  final CatalogService catalog = (CatalogService) deserializationContext
    .findInjectableValue(CatalogService.class.getName(), null, null);
  final StoragePluginCreator.PF4JStoragePlugin pf4jPlugin = catalog.getSource(sourceName);

  // Build a bean deserializer for the plugin-specific ORC scan-filter class and resolve it
  // (resolution wires up any cyclic/contextual dependencies before first use).
  final Class<? extends HiveProxiedOrcScanFilter> concreteClass = pf4jPlugin.getOrcScanFilterClass();
  final JavaType concreteType = deserializationContext.getTypeFactory().constructType(concreteClass);
  final BasicBeanDescription beanDescription = deserializationContext.getConfig().introspect(concreteType);
  final JsonDeserializer<Object> delegate = deserializationContext.getFactory().createBeanDeserializer(
    deserializationContext, concreteType, beanDescription);
  if (delegate instanceof ResolvableDeserializer) {
    ((ResolvableDeserializer) delegate).resolve(deserializationContext);
  }

  // Replay the buffered wrapped-filter subtree through the delegate deserializer.
  final JsonParser wrappedParser = jsonParser.getCodec().treeAsTokens(root.get(HiveProxyingOrcScanFilter.JSON_PROP_WRAPPEDHIVEORCSCANFILTER));
  deserializationContext.getConfig().initialize(wrappedParser);

  // A freshly created tree parser is not yet positioned on a token; advance it once.
  if (wrappedParser.getCurrentToken() == null) {
    wrappedParser.nextToken();
  }

  final HiveProxiedOrcScanFilter innerFilter = (HiveProxiedOrcScanFilter) delegate.deserialize(wrappedParser, deserializationContext);
  return new HiveProxyingOrcScanFilter(sourceName, innerFilter);
}
 
Example 5
Source File: Meta.java    From pegasus with Apache License 2.0 4 votes vote down vote up
/**
 * Deserializes a meta file, a JSON array of replica entries of the type
 *
 * <pre>
 * [
 *   {
 *     "_id": "f.b2",
 *     "_type": "file",
 *     "_attributes": {
 *       "user": "bamboo",
 *       "size": "56",
 *       "ctime": "2020-05-15T10:05:04-07:00",
 *       "checksum.type": "sha256",
 *       "checksum.value": "a69fef1a4b597ea5e61ce403b6ef8bb5b4cd3aba19e734bf340ea00f5095c894",
 *       "checksum.timing": "0.0"
 *     }
 *   }
 * ]
 * </pre>
 *
 * and inserts each entry into the Meta catalog instance supplied as the injectable
 * value registered under the key "callback".
 *
 * @param parser the JSON parser positioned at the meta document
 * @param dc the deserialization context carrying the "callback" injectable value
 * @return the populated ReplicaCatalog — the same Meta instance that was injected
 * @throws IOException
 * @throws JsonProcessingException
 */
@Override
public ReplicaCatalog deserialize(JsonParser parser, DeserializationContext dc)
        throws IOException, JsonProcessingException {
    ObjectCodec oc = parser.getCodec();
    JsonNode node = oc.readTree(parser);
    // the catalog to populate is handed in out-of-band as injectable value "callback"
    Meta metaRC = (Meta) dc.findInjectableValue("callback", null, null);
    if (metaRC == null) {
        throw new ReplicaCatalogException(
                "Callback not initialized when parsing initiated");
    }
    if (!node.isArray()) {
        throw new ReplicaCatalogException("The meta file should be array of entries");
    }

    for (JsonNode replicaNode : node) {

        String lfn = null;
        // type is captured for completeness but not used below — TODO confirm intent
        String type = null;
        ReplicaCatalogEntry rce = null;
        for (Iterator<Map.Entry<String, JsonNode>> it = replicaNode.fields();
                it.hasNext(); ) {
            Map.Entry<String, JsonNode> e = it.next();
            String key = e.getKey();
            MetaKeywords reservedKey = MetaKeywords.getReservedKey(key);
            if (reservedKey == null) {
                // NOTE(review): assumes complainForIllegalKey always throws; if it can
                // return normally the switch below NPEs on reservedKey — confirm
                this.complainForIllegalKey(MetaKeywords.META.getReservedName(), key, node);
            }

            switch (reservedKey) {
                case ID:
                    lfn = replicaNode.get(key).asText();
                    break;

                case ATTRIBUTES:
                    rce = this.createReplicaCatalogEntry(replicaNode.get(key));
                    break;

                case TYPE:
                    type = replicaNode.get(key).asText();
                    break;

                default:
                    this.complainForUnsupportedKey(
                            MetaKeywords.META.getReservedName(), key, node);
            }
        }
        // both an _id (lfn) and _attributes (rce) are mandatory per entry
        if (lfn == null) {
            throw new ReplicaCatalogException("LFN not specified for node " + replicaNode);
        }
        if (rce == null) {
            throw new ReplicaCatalogException(
                    "Attributes not specified for node " + replicaNode);
        }
        metaRC.insert(lfn, rce);
    }

    return metaRC;
}
 
Example 6
Source File: YAML.java    From pegasus with Apache License 2.0 4 votes vote down vote up
/**
 * Deserializes a Replica Catalog representation YAML description of the type
 *
 * <pre>
 *  pegasus: "5.0"
 *  replicas:
 *    - lfn: f1
 *      pfns:
 *        - site: local
 *          pfn: /path/to/file
 *        - site: condorpool
 *          pfn: /path/to/file
 *      checksum:
 *        sha256: abc123
 *      metadata:
 *        owner: vahi
 *        size: 1024
 *    - lfn: f2
 *      pfns:
 *        - site: local
 *          pfn: /path/to/file
 *        - site: condorpool
 *          pfn: /path/to/file
 *      checksum:
 *        sha256: 991232132abc
 *      metadata:
 *        owner: pegasus
 *        size: 1024
 *    - lfn: .*\.gz
 *      pfns:
 *        - site: local
 *          pfn: input/mono/[0]
 *          # cant have checksum
 *      metadata:
 *        owner: pegasus
 *        regex: true
 * </pre>
 *
 * Each replica entry is inserted into the YAML catalog instance supplied as the
 * injectable value registered under the key "callback".
 *
 * @param parser the JSON parser positioned at the catalog document
 * @param dc the deserialization context carrying the "callback" injectable value
 * @return the populated ReplicaCatalog — the same YAML instance that was injected
 * @throws IOException
 * @throws JsonProcessingException
 */
@Override
public ReplicaCatalog deserialize(JsonParser parser, DeserializationContext dc)
        throws IOException, JsonProcessingException {
    ObjectCodec oc = parser.getCodec();
    JsonNode node = oc.readTree(parser);
    // the catalog to populate is handed in out-of-band as injectable value "callback"
    YAML yamlRC = (YAML) dc.findInjectableValue("callback", null, null);
    if (yamlRC == null) {
        throw new RuntimeException("Callback not initialized when parsing initiated");
    }
    for (Iterator<Map.Entry<String, JsonNode>> it = node.fields(); it.hasNext(); ) {
        Map.Entry<String, JsonNode> e = it.next();
        String key = e.getKey();
        ReplicaCatalogKeywords reservedKey = ReplicaCatalogKeywords.getReservedKey(key);
        if (reservedKey == null) {
            // NOTE(review): assumes complainForIllegalKey always throws; if it can
            // return normally the switch below NPEs on reservedKey — confirm
            this.complainForIllegalKey(
                    ReplicaCatalogKeywords.REPLICAS.getReservedName(), key, node);
        }

        switch (reservedKey) {
            case PEGASUS:
                // only the scalar-valued PEGASUS key needs the text form; computing it
                // here avoids a wasted asText() on container nodes like "replicas"
                yamlRC.setVersion(node.get(key).asText());
                break;

            case REPLICAS:
                JsonNode replicaNodes = node.get(key);
                if (replicaNodes != null) {
                    if (replicaNodes.isArray()) {
                        for (JsonNode replicaNode : replicaNodes) {
                            // replay each entry's subtree through the registered
                            // ReplicaLocation deserializer
                            parser = replicaNode.traverse(oc);
                            ReplicaLocation rl = parser.readValueAs(ReplicaLocation.class);
                            yamlRC.insert(rl);
                        }
                    }
                }
                break;

            default:
                this.complainForUnsupportedKey(
                        ReplicaCatalogKeywords.REPLICAS.getReservedName(), key, node);
        }
    }

    return yamlRC;
}