Java Code Examples for org.codehaus.jackson.map.ObjectMapper#createObjectNode()

The following examples show how to use org.codehaus.jackson.map.ObjectMapper#createObjectNode(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: BeamSqlLineIT.java    From beam with Apache License 2.0 6 votes vote down vote up
/**
 * Serializes one taxi-ride event into its JSON string representation.
 * Field names in the payload follow the stream's snake_case schema.
 */
private String taxiRideJSON(
    String rideId,
    int pointIdex,
    double latitude,
    double longitude,
    int meterReading,
    int meterIncrement,
    String rideStatus,
    int passengerCount) {
  ObjectNode ride = new ObjectMapper().createObjectNode();
  ride.put("ride_id", rideId);
  ride.put("point_idx", pointIdex);
  ride.put("latitude", latitude);
  ride.put("longitude", longitude);
  ride.put("meter_reading", meterReading);
  ride.put("meter_increment", meterIncrement);
  ride.put("ride_status", rideStatus);
  ride.put("passenger_count", passengerCount);
  return ride.toString();
}
 
Example 2
Source File: PluginBusinessServiceImpl.java    From seldon-server with Apache License 2.0 6 votes vote down vote up
@Override
public JsonNode telepath_tag_prediction(final ConsumerBean consumerBean, final String client_item_id, final String attrName)
{
	// Locate the telepath plugin registered for this consumer; fail fast if absent.
	PluginProvider provider = findPlugin(consumerBean.getShort_name(), PLUGIN_TELEPATH_NAME);
	if (provider == null)
	{
		throw new APIException(APIException.PLUGIN_NOT_ENABLED);
	}
	// Fetch the item and extract the text attribute to be tagged.
	ItemBean item = itemService.getItem(consumerBean, client_item_id, true);
	String text = item.getAttributesName().get(attrName);
	// Wrap the text in the JSON payload shape expected by the plugin service.
	ObjectNode payload = new ObjectMapper().createObjectNode();
	payload.put("text", text);
	OptionsHolder options = new OptionsHolder(defaultOptions, provider.config);
	return provider.pluginService.execute(payload, options);
}
 
Example 3
Source File: WebModelServiceImpl.java    From fixflow with Apache License 2.0 6 votes vote down vote up
/**
 * Re-requests the model info: reads the BPMN file named by the request's
 * "fileName" parameter, converts it to the editor's JSON representation and
 * writes the resulting JSON document to the HTTP response.
 */
public void reTryModelInfo(){
	PrintWriter out = null;
	InputStream input = null;
	try {
		FixFlowConverter fixFlowConverter = new FixFlowConverter();
		ObjectMapper objectMapper = new ObjectMapper();
		input = new FileInputStream(buildPath() + File.separator + request.getParameter("fileName"));
		Definitions definitions = fixFlowConverter.getDefinitions("process_1", input);
		Process process = (Process) definitions.getRootElements().get(0);
		ObjectNode on = fixFlowConverter.convertDefinitions2Json(definitions);
		ObjectNode rootNode = objectMapper.createObjectNode();
		rootNode.put("name", process.getName());
		// NOTE(review): description deliberately mirrors the process name here;
		// confirm there is no separate description field that should be used.
		rootNode.put("description", process.getName());
		rootNode.put("modelId", process.getId());
		rootNode.put("model", on);
		out = response.getWriter();
		out.print(rootNode);
		// Flush, but leave closing the container-managed writer to the servlet.
		out.flush();
	} catch (Exception e) {
		throw new RuntimeException(e);
	} finally {
		// Fix: the original leaked this FileInputStream on every invocation.
		if (input != null) {
			try {
				input.close();
			} catch (Exception ignored) {
				// best-effort close; nothing useful can be done at this point
			}
		}
	}
}
 
Example 4
Source File: TestOperators.java    From Cubert with Apache License 2.0 5 votes vote down vote up
@Test
// when there are multiple rows in one table
public void testHashJoin2() throws JsonGenerationException,
        JsonMappingException,
        IOException,
        InterruptedException
{
    // Disjoint join keys on the two sides: the inner hash join emits nothing.
    Object[][] leftRows = { { 2 }, { 2 }, { 5 }, { 10 } };
    Object[][] rightRows = { { 1 }, { 7 }, { 9 } };
    Object[][] expected = {};

    Map<String, Block> input = new HashMap<String, Block>();
    input.put("block1", new ArrayBlock(Arrays.asList(leftRows), new String[] { "a" }));
    input.put("block2", new ArrayBlock(Arrays.asList(rightRows), new String[] { "a" }));

    // JSON configuration: join both sides on "a", with block1 as the left side.
    ObjectNode conf = new ObjectMapper().createObjectNode();
    conf.put("leftJoinKeys", "a");
    conf.put("rightJoinKeys", "a");
    conf.put("leftBlock", "block1");

    BlockProperties props =
            new BlockProperties(null,
                                new BlockSchema("INT block1___a, INT block2___a"),
                                (BlockProperties) null);
    TupleOperator operator = new HashJoinOperator();
    operator.setInput(input, conf, props);

    ArrayBlock.assertData(new TupleOperatorBlock(operator, props),
                          expected,
                          new String[] { "block1.a", "block2.a" });
}
 
Example 5
Source File: ActModelService.java    From Shop-for-JavaWeb with MIT License 5 votes vote down vote up
/**
 * Creates a new Activiti model with an empty editor canvas and stores its
 * metadata (name, revision, description) as a JSON string.
 * @throws UnsupportedEncodingException if the "utf-8" charset name is rejected
 */
public org.activiti.engine.repository.Model create(String name, String key, String description, String category) throws UnsupportedEncodingException {
	ObjectMapper objectMapper = new ObjectMapper();

	// Minimal editor JSON required by the Activiti modeler: a bare canvas
	// referencing the BPMN 2.0 stencil set.
	ObjectNode stencilSetNode = objectMapper.createObjectNode();
	stencilSetNode.put("namespace", "http://b3mn.org/stencilset/bpmn2.0#");
	ObjectNode editorNode = objectMapper.createObjectNode();
	editorNode.put("id", "canvas");
	editorNode.put("resourceId", "canvas");
	editorNode.put("stencilset", stencilSetNode);

	// Populate the model record; version is the count of models with this key + 1.
	org.activiti.engine.repository.Model modelData = repositoryService.newModel();
	description = StringUtils.defaultString(description);
	modelData.setKey(StringUtils.defaultString(key));
	modelData.setName(name);
	modelData.setCategory(category);
	long existingCount = repositoryService.createModelQuery().modelKey(modelData.getKey()).count();
	modelData.setVersion(Integer.parseInt(String.valueOf(existingCount + 1)));

	// Meta info is persisted alongside the model as a serialized JSON object.
	ObjectNode metaInfo = objectMapper.createObjectNode();
	metaInfo.put(ModelDataJsonConstants.MODEL_NAME, name);
	metaInfo.put(ModelDataJsonConstants.MODEL_REVISION, modelData.getVersion());
	metaInfo.put(ModelDataJsonConstants.MODEL_DESCRIPTION, description);
	modelData.setMetaInfo(metaInfo.toString());

	repositoryService.saveModel(modelData);
	repositoryService.addModelEditorSource(modelData.getId(), editorNode.toString().getBytes("utf-8"));

	return modelData;
}
 
Example 6
Source File: TestOperators.java    From Cubert with Apache License 2.0 5 votes vote down vote up
@Test
public void testCombinedBlockLeftRunsout() throws JsonGenerationException,
        JsonMappingException,
        IOException,
        InterruptedException
{
    // Two sorted single-column blocks; COMBINE merges them into one sorted stream.
    Object[][] leftRows = { { 2 }, { 7 }, { 9 } };
    Object[][] rightRows = { { 3 }, { 5 }, { 10 } };

    Map<String, Block> input = new HashMap<String, Block>();
    input.put("cube1", new ArrayBlock(Arrays.asList(leftRows), new String[] { "a" }, 1));
    input.put("cube2", new ArrayBlock(Arrays.asList(rightRows), new String[] { "a" }, 1));

    ObjectMapper mapper = new ObjectMapper();
    ObjectNode conf = mapper.createObjectNode();
    // pivotBy is a JSON array of column names, parsed from a literal here.
    conf.put("pivotBy", mapper.readValue("[\"a\"]", JsonNode.class));

    BlockProperties props =
            new BlockProperties(null,
                                new BlockSchema("INT a"),
                                (BlockProperties) null);
    TupleOperator operator = new CombineOperator();
    operator.setInput(input, conf, props);

    Block output = new TupleOperatorBlock(operator, props);

    // Expect the union of both inputs in ascending order.
    Object[][] expected = { { 2 }, { 3 }, { 5 }, { 7 }, { 9 }, { 10 } };
    ArrayBlock.assertData(output, expected, new String[] { "a" });
}
 
Example 7
Source File: TestOperators.java    From Cubert with Apache License 2.0 5 votes vote down vote up
@Test
// when there are multiple rows in one table
public void testMergeJoin4() throws JsonGenerationException,
        JsonMappingException,
        IOException,
        InterruptedException
{
    // Duplicate keys (2 and 100) on both sides produce one row per matching pair.
    Object[][] leftRows = { { 0 }, { 2 }, { 2 }, { 5 }, { 10 }, { 100 } };
    Object[][] rightRows = { { 1 }, { 2 }, { 7 }, { 9 }, { 100 }, { 100 } };
    Object[][] expected = { { 2, 2 }, { 2, 2 }, { 100, 100 }, { 100, 100 } };

    Map<String, Block> input = new HashMap<String, Block>();
    input.put("block1", new ArrayBlock(Arrays.asList(leftRows), new String[] { "a" }));
    input.put("block2", new ArrayBlock(Arrays.asList(rightRows), new String[] { "a" }));

    // Inner merge join on column "a" with block1 designated as the left input.
    ObjectNode conf = new ObjectMapper().createObjectNode();
    conf.put("leftCubeColumns", "a");
    conf.put("rightCubeColumns", "a");
    conf.put("leftBlock", "block1");

    BlockProperties props =
            new BlockProperties(null,
                                new BlockSchema("INT block1___a, INT block2___a"),
                                (BlockProperties) null);
    TupleOperator operator = new MergeJoinOperator();
    operator.setInput(input, conf, props);

    ArrayBlock.assertData(new TupleOperatorBlock(operator, props),
                          expected,
                          new String[] { "block1.a", "block2.a" });
}
 
Example 8
Source File: TestOperators.java    From Cubert with Apache License 2.0 5 votes vote down vote up
@Test
public void testGroupByWithSum1() throws JsonGenerationException,
        JsonMappingException,
        IOException,
        InterruptedException
{
    // GROUP BY a with SUM(a): the duplicate key 2 sums to 4; singletons map to themselves.
    Object[][] rows = { { 0 }, { 2 }, { 2 }, { 5 }, { 10 }, { 100 } };

    Map<String, Block> input = new HashMap<String, Block>();
    input.put("first", new ArrayBlock(Arrays.asList(rows), new String[] { "a" }, 1));

    ObjectMapper mapper = new ObjectMapper();
    ObjectNode conf = mapper.createObjectNode();
    conf.put("input", "first");

    ArrayNode groupByCols = mapper.createArrayNode();
    groupByCols.add("a");
    conf.put("groupBy", groupByCols);

    // Single aggregate: SUM over column "a", emitted as "sum".
    ObjectNode sumSpec = mapper.createObjectNode();
    sumSpec.put("type", "SUM");
    sumSpec.put("input", "a");
    sumSpec.put("output", "sum");
    ArrayNode aggregates = mapper.createArrayNode();
    aggregates.add(sumSpec);
    conf.put("aggregates", aggregates);

    BlockProperties props =
            new BlockProperties(null,
                                new BlockSchema("INT a, INT sum"),
                                (BlockProperties) null);
    TupleOperator operator = new GroupByOperator();
    operator.setInput(input, conf, props);

    Block output = new TupleOperatorBlock(operator, props);

    Object[][] expected = { { 0, 0 }, { 2, 4 }, { 5, 5 }, { 10, 10 }, { 100, 100 } };
    ArrayBlock.assertData(output, expected, new String[] { "a", "sum" });
}
 
Example 9
Source File: TestOperators.java    From Cubert with Apache License 2.0 5 votes vote down vote up
@Test
// when there are multiple rows in one table
public void testMergeJoin2() throws JsonGenerationException,
        JsonMappingException,
        IOException,
        InterruptedException
{
    // No key appears on both sides, so the inner merge join yields no rows.
    Object[][] leftRows = { { 2 }, { 2 }, { 5 }, { 10 } };
    Object[][] rightRows = { { 1 }, { 7 }, { 9 } };
    Object[][] expected = {};

    Map<String, Block> input = new HashMap<String, Block>();
    input.put("block1", new ArrayBlock(Arrays.asList(leftRows), new String[] { "a" }));
    input.put("block2", new ArrayBlock(Arrays.asList(rightRows), new String[] { "a" }));

    ObjectNode conf = new ObjectMapper().createObjectNode();
    conf.put("leftCubeColumns", "a");
    conf.put("rightCubeColumns", "a");
    conf.put("leftBlock", "block1");

    BlockProperties props =
            new BlockProperties(null,
                                new BlockSchema("INT block1___a, INT block2___a"),
                                (BlockProperties) null);
    TupleOperator operator = new MergeJoinOperator();
    operator.setInput(input, conf, props);

    ArrayBlock.assertData(new TupleOperatorBlock(operator, props),
                          expected,
                          new String[] { "block1.a", "block2.a" });
}
 
Example 10
Source File: TestOperators.java    From Cubert with Apache License 2.0 5 votes vote down vote up
@Test
public void testSortOperator() throws JsonGenerationException,
        JsonMappingException,
        IOException,
        InterruptedException
{
    System.out.println("Testing SORT operator");

    // Unsorted three-column input; SORT orders rows by columns (b, c).
    Object[][] rows =
            { { 0, 10, 0 }, { 2, 5, 2 }, { 2, 8, 5 }, { 5, 9, 6 }, { 10, 11, 1 },
                    { 100, 6, 10 } };

    Map<String, Block> input = new HashMap<String, Block>();
    input.put("unsorted",
              new ArrayBlock(Arrays.asList(rows), new String[] { "a", "b", "c" }, 1));

    ObjectMapper mapper = new ObjectMapper();
    ObjectNode conf = mapper.createObjectNode();
    conf.put("input", "unsorted");
    ArrayNode sortKeys = mapper.createArrayNode();
    sortKeys.add("b");
    sortKeys.add("c");
    conf.put("sortBy", sortKeys);

    BlockProperties props =
            new BlockProperties(null,
                                new BlockSchema("INT a, INT b, INT c"),
                                (BlockProperties) null);
    TupleOperator operator = new SortOperator();
    operator.setInput(input, conf, props);

    Block output = new TupleOperatorBlock(operator, props);
    System.out.println("output is " + output);

    // Rows emitted in ascending order of b (then c).
    Object[][] expected =
            { { 2, 5, 2 }, { 100, 6, 10 }, { 2, 8, 5 }, { 5, 9, 6 }, { 0, 10, 0 },
                    { 10, 11, 1 } };
    ArrayBlock.assertData(output, expected, new String[] { "a", "b", "c" });
}
 
Example 11
Source File: TestOperators.java    From Cubert with Apache License 2.0 5 votes vote down vote up
@Test
// when there are multiple rows in one table
public void testMergeJoinFullOuter() throws JsonGenerationException,
        JsonMappingException,
        IOException,
        InterruptedException
{
    // Full outer join: unmatched keys from either side appear padded with null.
    Object[][] leftRows = { { 0 }, { 2 }, { 2 }, { 5 }, { 10 }, { 100 } };
    Object[][] rightRows = { { 1 }, { 2 }, { 7 }, { 9 }, { 100 }, { 100 } };
    Object[][] expected =
            { { 0, null }, { null, 1 }, { 2, 2 }, { 2, 2 }, { 5, null }, { null, 7 },
                    { null, 9 }, { 10, null }, { 100, 100 }, { 100, 100 } };

    Map<String, Block> input = new HashMap<String, Block>();
    input.put("block1", new ArrayBlock(Arrays.asList(leftRows), new String[] { "a" }));
    input.put("block2", new ArrayBlock(Arrays.asList(rightRows), new String[] { "a" }));

    ObjectNode conf = new ObjectMapper().createObjectNode();
    conf.put("leftCubeColumns", "a");
    conf.put("rightCubeColumns", "a");
    conf.put("leftBlock", "block1");
    conf.put("rightBlock", "block2");
    conf.put("joinType", "full outer");

    BlockProperties props =
            new BlockProperties(null,
                                new BlockSchema("INT block1___a, INT block2___a"),
                                (BlockProperties) null);
    TupleOperator operator = new MergeJoinOperator();
    operator.setInput(input, conf, props);

    ArrayBlock.assertData(new TupleOperatorBlock(operator, props),
                          expected,
                          new String[] { "block1.a", "block2.a" });
    System.out.println("Successfully tested MERGE JOIN FULL OUTER");
}
 
Example 12
Source File: AssignableInstanceManager.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a JSON snapshot of per-instance quota capacity ("used/total" per quota
 * type, per resource type) and logs it at INFO level.
 *
 * @param onlyDisplayIfFull when true, only instances with at least one fully
 *        used quota are included in the logged snapshot
 */
public void logQuotaProfileJSON(boolean onlyDisplayIfFull) {
  ObjectMapper mapper = new ObjectMapper();
  // Fix: declare as ObjectNode (what createObjectNode() returns) instead of
  // JsonNode, eliminating the repeated (ObjectNode) downcasts at every put().
  ObjectNode instanceNode = mapper.createObjectNode();

  // Loop through all instances
  for (Map.Entry<String, AssignableInstance> instanceEntry : _assignableInstanceMap.entrySet()) {
    AssignableInstance assignableInstance = instanceEntry.getValue();
    boolean capacityFull = false;
    ObjectNode resourceTypeNode = mapper.createObjectNode();
    for (Map.Entry<String, Map<String, Integer>> capacityEntry : assignableInstance
        .getTotalCapacity().entrySet()) {
      String resourceType = capacityEntry.getKey();
      Map<String, Integer> quotaTypeMap = capacityEntry.getValue();
      ObjectNode quotaTypeNode = mapper.createObjectNode();
      for (Map.Entry<String, Integer> typeEntry : quotaTypeMap.entrySet()) {
        String quotaType = typeEntry.getKey();
        int totalCapacity = typeEntry.getValue();
        int usedCapacity = assignableInstance.getUsedCapacity().get(resourceType).get(quotaType);
        // An instance counts as "full" if any quota type has no remaining capacity.
        if (!capacityFull) {
          capacityFull = totalCapacity <= usedCapacity;
        }
        String capacityString = String.format("%d/%d", usedCapacity, totalCapacity);
        quotaTypeNode.put(quotaType, capacityString);
      }
      resourceTypeNode.put(resourceType, quotaTypeNode);
    }
    // If onlyDisplayIfFull, do not add the JsonNode to the parent node
    if (onlyDisplayIfFull && !capacityFull) {
      continue;
    }
    instanceNode.put(instanceEntry.getKey(), resourceTypeNode);
  }
  if (instanceNode.size() > 0) {
    // SLF4J parameterized logging stringifies the argument itself.
    LOG.info("Current quota capacity: {}", instanceNode);
  }
}
 
Example 13
Source File: TestOperators.java    From Cubert with Apache License 2.0 5 votes vote down vote up
@Test
// when there are multiple rows in one table
public void testLeftMergeJoin() throws JsonGenerationException,
        JsonMappingException,
        IOException,
        InterruptedException
{
    // Left outer join: every left row survives; unmatched ones carry null on the right.
    Object[][] leftRows = { { 0 }, { 2 }, { 2 }, { 5 }, { 10 }, { 100 } };
    Object[][] rightRows = { { 1 }, { 2 }, { 7 }, { 9 }, { 100 }, { 100 } };
    Object[][] expected =
            { { 0, null }, { 2, 2 }, { 2, 2 }, { 5, null }, { 10, null },
                    { 100, 100 }, { 100, 100 } };

    Map<String, Block> input = new HashMap<String, Block>();
    input.put("block1", new ArrayBlock(Arrays.asList(leftRows), new String[] { "a" }));
    input.put("block2", new ArrayBlock(Arrays.asList(rightRows), new String[] { "a" }));

    ObjectNode conf = new ObjectMapper().createObjectNode();
    conf.put("leftCubeColumns", "a");
    conf.put("rightCubeColumns", "a");
    conf.put("leftBlock", "block1");
    conf.put("rightBlock", "block2");
    conf.put("joinType", "left outer");

    BlockProperties props =
            new BlockProperties(null,
                                new BlockSchema("INT block1___a, INT block2___a"),
                                (BlockProperties) null);
    TupleOperator operator = new MergeJoinOperator();
    operator.setInput(input, conf, props);

    ArrayBlock.assertData(new TupleOperatorBlock(operator, props),
                          expected,
                          new String[] { "block1.a", "block2.a" });
}
 
Example 14
Source File: TestDependencyGraph.java    From Cubert with Apache License 2.0 5 votes vote down vote up
DependencyGraph getGraph()
{
    // Build the graph: one node per vertex, each tagged with a {"name": vertex}
    // JSON payload and connected to its parents from the edges map.
    ObjectMapper mapper = new ObjectMapper();
    DependencyGraph graph = new DependencyGraph();

    for (String vertex : vertices)
    {
        ObjectNode payload = mapper.createObjectNode();
        payload.put("name", vertex);
        graph.addNode(vertex, edges.get(vertex), payload);
    }

    return graph;
}
 
Example 15
Source File: TestOperators.java    From Cubert with Apache License 2.0 5 votes vote down vote up
// NOTE(review): unlike the sibling operator tests, this method carries no @Test
// annotation — confirm whether it is intentionally disabled before adding one.
public void testDictionaryEncoding() throws IOException,
        InterruptedException
{
    // Dictionary block: for column "1000", value 100 -> code 1, value 101 -> code 2.
    Object[][] dictRows = { { 1000, 100, 1 }, { 1000, 101, 2 } };
    Block dictionary =
            new ArrayBlock(Arrays.asList(dictRows), new String[] { "colname",
                    "colvalue", "code" });

    // Data block whose "1000" column values get replaced by dictionary codes.
    Object[][] dataRows = { { 100, 10 }, { 100, 11 }, { 101, 10 } };
    Block dataBlock =
            new ArrayBlock(Arrays.asList(dataRows), new String[] { "1000", "a" });

    Map<String, Block> input = new HashMap<String, Block>();
    input.put("block1", dictionary);
    input.put("block2", dataBlock);

    ObjectNode conf = new ObjectMapper().createObjectNode();
    conf.put("dictionary", "block1");

    BlockProperties props =
            new BlockProperties(null,
                                new BlockSchema("INT 1000, INT a"),
                                (BlockProperties) null);
    TupleOperator operator = new DictionaryEncodeOperator();
    operator.setInput(input, conf, props);

    Block output = new TupleOperatorBlock(operator, props);

    // Codes substituted in column "1000"; column "a" passes through unchanged.
    Object[][] expected = { { 1, 10 }, { 1, 11 }, { 2, 10 } };
    ArrayBlock.assertData(output, expected, new String[] { "1000", "a" });
}
 
Example 16
Source File: TestOperators.java    From Cubert with Apache License 2.0 5 votes vote down vote up
@Test
public void testGroupByWithSum2() throws JsonGenerationException,
        JsonMappingException,
        IOException,
        InterruptedException
{
    // GROUP BY a with SUM(b): the two rows with a == 2 contribute 2 + 5 = 7.
    Object[][] rows =
            { { 0, 0 }, { 2, 2 }, { 2, 5 }, { 5, 6 }, { 10, 1 }, { 100, 10 } };

    Map<String, Block> input = new HashMap<String, Block>();
    input.put("first",
              new ArrayBlock(Arrays.asList(rows), new String[] { "a", "b" }, 1));

    ObjectMapper mapper = new ObjectMapper();
    ObjectNode conf = mapper.createObjectNode();
    conf.put("input", "first");

    ArrayNode groupByCols = mapper.createArrayNode();
    groupByCols.add("a");
    conf.put("groupBy", groupByCols);

    // Single aggregate: SUM over column "b", emitted as "sum".
    ObjectNode sumSpec = mapper.createObjectNode();
    sumSpec.put("type", "SUM");
    sumSpec.put("input", "b");
    sumSpec.put("output", "sum");
    ArrayNode aggregates = mapper.createArrayNode();
    aggregates.add(sumSpec);
    conf.put("aggregates", aggregates);

    BlockProperties props =
            new BlockProperties(null,
                                new BlockSchema("INT a, INT sum"),
                                (BlockProperties) null);
    TupleOperator operator = new GroupByOperator();
    operator.setInput(input, conf, props);

    Block output = new TupleOperatorBlock(operator, props);

    Object[][] expected = { { 0, 0 }, { 2, 7 }, { 5, 6 }, { 10, 1 }, { 100, 10 } };
    ArrayBlock.assertData(output, expected, new String[] { "a", "sum" });
}
 
Example 17
Source File: TestSamzaObjectMapper.java    From samza with Apache License 2.0 5 votes vote down vote up
/**
 * Builds {@link ObjectNode} which matches the {@link JobModel} built in setup.
 */
private static ObjectNode buildJobModelJson() {
  ObjectMapper mapper = new ObjectMapper();

  // Job config: a single entry a=b.
  ObjectNode config = mapper.createObjectNode();
  config.put("a", "b");

  // The single SSP for the "test" task: system foo, stream bar, partition 1.
  ObjectNode ssp = mapper.createObjectNode();
  ssp.put("system", "foo");
  ssp.put("stream", "bar");
  ssp.put("partition", 1);
  ArrayNode ssps = mapper.createArrayNode();
  ssps.add(ssp);

  ObjectNode taskTest = mapper.createObjectNode();
  taskTest.put("task-name", "test");
  taskTest.put("system-stream-partitions", ssps);
  taskTest.put("changelog-partition", 2);
  taskTest.put("task-mode", "Active");

  ObjectNode tasks = mapper.createObjectNode();
  tasks.put("test", taskTest);

  ObjectNode container1 = mapper.createObjectNode();
  // important: needs to be "processor-id" for compatibility between Samza 0.14 and 1.0
  container1.put("processor-id", "1");
  container1.put("tasks", tasks);

  ObjectNode containers = mapper.createObjectNode();
  containers.put("1", container1);

  // Assemble the top-level job model: {"config": ..., "containers": ...}.
  ObjectNode jobModel = mapper.createObjectNode();
  jobModel.put("config", config);
  jobModel.put("containers", containers);

  return jobModel;
}
 
Example 18
Source File: TestOperators.java    From Cubert with Apache License 2.0 4 votes vote down vote up
@Test
public void testGroupByWithMax() throws JsonGenerationException,
        JsonMappingException,
        IOException,
        InterruptedException
{
    // GROUP BY a with MAX(b): e.g. key 10 keeps max(1, 20) = 20.
    Object[][] rows =
            { { 0, 0 }, { 2, 2 }, { 2, 5 }, { 5, 6 }, { 10, 1 }, { 10, 20 },
                    { 100, 10 }, { 100, 1 } };

    Map<String, Block> input = new HashMap<String, Block>();
    input.put("first",
              new ArrayBlock(Arrays.asList(rows), new String[] { "a", "b" }, 1));

    ObjectMapper mapper = new ObjectMapper();
    ObjectNode conf = mapper.createObjectNode();
    conf.put("input", "first");

    ArrayNode groupByCols = mapper.createArrayNode();
    groupByCols.add("a");
    conf.put("groupBy", groupByCols);

    // Single aggregate: MAX over column "b", emitted as "max".
    ObjectNode maxSpec = mapper.createObjectNode();
    maxSpec.put("type", "MAX");
    maxSpec.put("input", "b");
    maxSpec.put("output", "max");
    ArrayNode aggregates = mapper.createArrayNode();
    aggregates.add(maxSpec);
    conf.put("aggregates", aggregates);

    BlockProperties props =
            new BlockProperties(null,
                                new BlockSchema("INT a, INT max"),
                                (BlockProperties) null);
    TupleOperator operator = new GroupByOperator();
    operator.setInput(input, conf, props);

    Block output = new TupleOperatorBlock(operator, props);

    Object[][] expected = { { 0, 0 }, { 2, 5 }, { 5, 6 }, { 10, 20 }, { 100, 10 } };
    ArrayBlock.assertData(output, expected, new String[] { "a", "max" });
}
 
Example 19
Source File: TestOperators.java    From Cubert with Apache License 2.0 4 votes vote down vote up
@Test
// testing multiple join keys
// Inner merge join on a composite key: left columns (a, b) paired with right (c, a).
public void testMergeJoinMultipleJoinKeys() throws JsonGenerationException,
        JsonMappingException,
        IOException,
        InterruptedException
{
    // Left rows keyed by (a, b); right rows keyed by (c, a).
    Object[][] rows1 =
            { { 0, 1 }, { 2, 1 }, { 2, 2 }, { 5, 1 }, { 10, 1 }, { 100, 1 } };
    Object[][] rows2 =
            { { 1, 1 }, { 2, 0 }, { 2, 1 }, { 5, 1 }, { 100, 2 }, { 100, 3 } };
    // Only the composite keys (2,1) and (5,1) occur on both sides.
    Object[][] expected = { { 2, 1, 2, 1 }, { 5, 1, 5, 1 } };

    Block block1 = new ArrayBlock(Arrays.asList(rows1), new String[] { "a", "b" });
    Block block2 = new ArrayBlock(Arrays.asList(rows2), new String[] { "c", "a" });

    TupleOperator operator = new MergeJoinOperator();
    Map<String, Block> input = new HashMap<String, Block>();
    input.put("block1", block1);
    input.put("block2", block2);

    // Join keys are passed as JSON arrays; left (a, b) matches right (c, a) positionally.
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode node = mapper.createObjectNode();
    ArrayNode lkeys = mapper.createArrayNode();
    lkeys.add("a");
    lkeys.add("b");
    node.put("leftCubeColumns", lkeys);
    ArrayNode rkeys = mapper.createArrayNode();
    rkeys.add("c");
    rkeys.add("a");
    node.put("rightCubeColumns", rkeys);
    node.put("leftBlock", "block1");

    BlockProperties props =
            new BlockProperties(null,
                                new BlockSchema("INT block1___a, INT block1___b, INT block2___c, INT block2___a"),
                                (BlockProperties) null);
    operator.setInput(input, node, props);

    Block output = new TupleOperatorBlock(operator, props);

    // NOTE(review): expected rows have 4 columns but only 2 column names are
    // supplied here ("block1.a", "block2.a") — confirm assertData tolerates this.
    ArrayBlock.assertData(output, expected, new String[] { "block1.a", "block2.a" });
}
 
Example 20
Source File: CubertCombiner.java    From Cubert with Apache License 2.0 4 votes vote down vote up
@Override
public void run(Context context) throws IOException,
        InterruptedException
{
    Configuration conf = context.getConfiguration();
    ObjectMapper mapper = new ObjectMapper();
    JsonNode shuffleJson =
            mapper.readValue(conf.get(CubertStrings.JSON_SHUFFLE), JsonNode.class);

    ObjectNode groupByJson = mapper.createObjectNode();
    groupByJson.put("name", shuffleJson.get("name"));
    groupByJson.put("type", shuffleJson.get("type"));
    groupByJson.put("groupBy", shuffleJson.get("pivotKeys"));
    groupByJson.put("aggregates", shuffleJson.get("aggregates"));

    String[] keyColumns = JsonUtils.asArray(shuffleJson, "pivotKeys");
    BlockSchema fullSchema = new BlockSchema(shuffleJson.get("schema"));
    BlockSchema valueSchema = fullSchema.getComplementSubset(keyColumns);

    CommonContext commonContext = new ReduceContext(context);
    Block input = new ContextBlock(commonContext);
    input.configure(shuffleJson);

    try
    {
        TupleOperator operator =
                OperatorFactory.getTupleOperator(OperatorType.GROUP_BY);

        Map<String, Block> inputMap = new HashMap<String, Block>();
        inputMap.put("groupbyBlock", input);
        BlockProperties props =
                new BlockProperties(null, fullSchema, (BlockProperties) null);
        operator.setInput(inputMap, groupByJson, props);

        String[] valueColumns = valueSchema.getColumnNames();
        Tuple tuple;
        Tuple key = TupleFactory.getInstance().newTuple(keyColumns.length);
        Tuple value = TupleFactory.getInstance().newTuple(valueColumns.length);

        while ((tuple = operator.next()) != null)
        {
            TupleUtils.extractTupleWithReuse(tuple, fullSchema, key, keyColumns);
            TupleUtils.extractTupleWithReuse(tuple, fullSchema, value, valueColumns);
            context.write(key, value);
        }
    }
    // catch this exception here and don't let it propagate to hadoop; if it does,
    // there is a bug in the hadoop code which just hangs the job without killing it.
    catch (Exception e)
    {
        e.printStackTrace();
    }
}