Java Code Examples for org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode#get()

The following examples show how to use org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode#get(). They are taken from open source projects; the source file and originating project are noted above each example.
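
Before the project examples, a minimal, self-contained sketch may help clarify the get() contract: get(String) returns the named field of an object node, get(int) returns the element at an array index, and both return null when the key or index is absent (unlike path(), which returns a missing node). The class name and sample JSON below are illustrative only; the unshaded com.fasterxml.jackson.databind package is used for brevity, since the Flink-shaded package relocates the same Jackson API.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonNodeGetDemo {
	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		JsonNode root = mapper.readTree(
			"{\"taskmanagers\":[{\"id\":\"tm-1\",\"freeSlots\":4}]}");

		// get(String) looks up an object field and returns null if it is absent
		JsonNode taskManagers = root.get("taskmanagers");

		// get(int) indexes into an array node and returns null when out of range
		JsonNode first = taskManagers.get(0);

		System.out.println(first.get("id").asText());       // tm-1
		System.out.println(first.get("freeSlots").asInt()); // 4
		System.out.println(root.get("missing"));            // null
	}
}
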
Example 1
Source File: WebFrontendITCase.java    From flink with Apache License 2.0
@Test
public void getTaskManagerLogAndStdoutFiles() {
	try {
		String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

		ObjectMapper mapper = new ObjectMapper();
		JsonNode parsed = mapper.readTree(json);
		ArrayNode taskManagers = (ArrayNode) parsed.get("taskmanagers");
		JsonNode taskManager = taskManagers.get(0);
		String id = taskManager.get("id").asText();

		WebMonitorUtils.LogFileLocation logFiles = WebMonitorUtils.LogFileLocation.find(CLUSTER_CONFIGURATION);

	// we check for job manager log files, since no separate taskmanager logs exist
		FileUtils.writeStringToFile(logFiles.logFile, "job manager log");
		String logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/" + id + "/log");
		assertTrue(logs.contains("job manager log"));

		FileUtils.writeStringToFile(logFiles.stdOutFile, "job manager out");
		logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/" + id + "/stdout");
		assertTrue(logs.contains("job manager out"));
	} catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example 2
Source File: WebFrontendITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void getNumberOfTaskManagers() {
	try {
		String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

		ObjectMapper mapper = new ObjectMapper();
		JsonNode response = mapper.readTree(json);
		ArrayNode taskManagers = (ArrayNode) response.get("taskmanagers");

		assertNotNull(taskManagers);
		assertEquals(NUM_TASK_MANAGERS, taskManagers.size());
	} catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example 3
Source File: WebFrontendITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void getTaskmanagers() throws Exception {
	String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

	ObjectMapper mapper = new ObjectMapper();
	JsonNode parsed = mapper.readTree(json);
	ArrayNode taskManagers = (ArrayNode) parsed.get("taskmanagers");

	assertNotNull(taskManagers);
	assertEquals(NUM_TASK_MANAGERS, taskManagers.size());

	JsonNode taskManager = taskManagers.get(0);
	assertNotNull(taskManager);
	assertEquals(NUM_SLOTS, taskManager.get("slotsNumber").asInt());
	assertTrue(taskManager.get("freeSlots").asInt() <= NUM_SLOTS);
}
 
Example 4
Source File: WebFrontendITCase.java    From flink with Apache License 2.0
@Test
public void getTaskManagerLogAndStdoutFiles() throws Exception {
	String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

	ObjectMapper mapper = new ObjectMapper();
	JsonNode parsed = mapper.readTree(json);
	ArrayNode taskManagers = (ArrayNode) parsed.get("taskmanagers");
	JsonNode taskManager = taskManagers.get(0);
	String id = taskManager.get("id").asText();

	WebMonitorUtils.LogFileLocation logFiles = WebMonitorUtils.LogFileLocation.find(CLUSTER_CONFIGURATION);

	// we check for job manager log files, since no separate taskmanager logs exist
	FileUtils.writeStringToFile(logFiles.logFile, "job manager log");
	String logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/" + id + "/log");
	assertThat(logs, containsString("job manager log"));

	FileUtils.writeStringToFile(logFiles.stdOutFile, "job manager out");
	logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/" + id + "/stdout");
	assertThat(logs, containsString("job manager out"));
}
 
Example 5
Source File: FsJobArchivist.java    From Flink-CEPplus with Apache License 2.0
/**
 * Reads the given archive file and returns a {@link Collection} of contained {@link ArchivedJson}.
 *
 * @param file archive to extract
 * @return collection of archived JSONs
 * @throws IOException if the file can't be opened or read, or doesn't contain valid JSON
 */
public static Collection<ArchivedJson> getArchivedJsons(Path file) throws IOException {
	try (FSDataInputStream input = file.getFileSystem().open(file);
		ByteArrayOutputStream output = new ByteArrayOutputStream()) {
		IOUtils.copyBytes(input, output);

		JsonNode archive = mapper.readTree(output.toByteArray());

		Collection<ArchivedJson> archives = new ArrayList<>();
		for (JsonNode archivePart : archive.get(ARCHIVE)) {
			String path = archivePart.get(PATH).asText();
			String json = archivePart.get(JSON).asText();
			archives.add(new ArchivedJson(path, json));
		}
		return archives;
	}
}
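
A brief usage sketch (not from the Flink sources): assuming an org.apache.flink.core.fs.Path that points at an existing HistoryServer archive file, the method can be called statically and the returned entries inspected. The archive location below is a hypothetical placeholder.

	// Hypothetical caller; the archive path is an illustrative assumption.
	Path archive = new Path("hdfs:///completed-jobs/<job-id>");
	for (ArchivedJson entry : FsJobArchivist.getArchivedJsons(archive)) {
		// each entry pairs a REST endpoint path with its pre-rendered JSON response
		System.out.println(entry.getPath());
	}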
 
Example 6
Source File: WebFrontendITCase.java    From flink with Apache License 2.0
@Test
public void getTaskManagers() throws Exception {
	String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

	ObjectMapper mapper = new ObjectMapper();
	JsonNode parsed = mapper.readTree(json);
	ArrayNode taskManagers = (ArrayNode) parsed.get("taskmanagers");

	assertNotNull(taskManagers);
	assertEquals(NUM_TASK_MANAGERS, taskManagers.size());

	JsonNode taskManager = taskManagers.get(0);
	assertNotNull(taskManager);
	assertEquals(NUM_SLOTS, taskManager.get("slotsNumber").asInt());
	assertTrue(taskManager.get("freeSlots").asInt() <= NUM_SLOTS);
}
 
Example 7
Source File: JsonRowSchemaConverter.java    From flink with Apache License 2.0
private static TypeInformation<?> convertArray(String location, JsonNode node, JsonNode root) {
	// validate items
	if (!node.has(ITEMS)) {
		throw new IllegalArgumentException(
			"Arrays must specify an '" + ITEMS + "' property in node: " + location);
	}
	final JsonNode items = node.get(ITEMS);

	// list (translated to object array)
	if (items.isObject()) {
		final TypeInformation<?> elementType = convertType(
			location + '/' + ITEMS,
			items,
			root);
		// result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings
		return Types.OBJECT_ARRAY(elementType);
	}
	// tuple (translated to row)
	else if (items.isArray()) {
		final TypeInformation<?>[] types = convertTypes(location + '/' + ITEMS, items, root);

		// validate that array does not contain additional items
		if (node.has(ADDITIONAL_ITEMS) && node.get(ADDITIONAL_ITEMS).isBoolean() &&
				node.get(ADDITIONAL_ITEMS).asBoolean()) {
			throw new IllegalArgumentException(
				"An array tuple must not allow additional items in node: " + location);
		}

		return Types.ROW(types);
	}
	throw new IllegalArgumentException(
		"Invalid type for '" + ITEMS + "' property in node: " + location);
}
 
Example 8
Source File: JsonRowSchemaConverter.java    From Flink-CEPplus with Apache License 2.0
private static TypeInformation<Row> convertObject(String location, JsonNode node, JsonNode root) {
	// validate properties
	if (!node.has(PROPERTIES)) {
		return Types.ROW();
	}
	if (!node.isObject()) {
		throw new IllegalArgumentException(
			"Invalid '" + PROPERTIES + "' property for object type in node: " + location);
	}
	final JsonNode props = node.get(PROPERTIES);
	final String[] names = new String[props.size()];
	final TypeInformation<?>[] types = new TypeInformation[props.size()];

	final Iterator<Map.Entry<String, JsonNode>> fieldIter = props.fields();
	int i = 0;
	while (fieldIter.hasNext()) {
		final Map.Entry<String, JsonNode> subNode = fieldIter.next();

		// set field name
		names[i] = subNode.getKey();

		// set type
		types[i] = convertType(location + '/' + subNode.getKey(), subNode.getValue(), root);

		i++;
	}

	// validate that object does not contain additional properties
	if (node.has(ADDITIONAL_PROPERTIES) && node.get(ADDITIONAL_PROPERTIES).isBoolean() &&
			node.get(ADDITIONAL_PROPERTIES).asBoolean()) {
		throw new IllegalArgumentException(
			"An object must not allow additional properties in node: " + location);
	}

	return Types.ROW_NAMED(names, types);
}
 
Example 9
Source File: JobDetails.java    From flink with Apache License 2.0
@Override
public JobDetails deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {

	JsonNode rootNode = jsonParser.readValueAsTree();

	JobID jobId = JobID.fromHexString(rootNode.get(FIELD_NAME_JOB_ID).textValue());
	String jobName = rootNode.get(FIELD_NAME_JOB_NAME).textValue();
	long startTime = rootNode.get(FIELD_NAME_START_TIME).longValue();
	long endTime = rootNode.get(FIELD_NAME_END_TIME).longValue();
	long duration = rootNode.get(FIELD_NAME_DURATION).longValue();
	JobStatus jobStatus = JobStatus.valueOf(rootNode.get(FIELD_NAME_STATUS).textValue());
	long lastUpdateTime = rootNode.get(FIELD_NAME_LAST_MODIFICATION).longValue();

	JsonNode tasksNode = rootNode.get("tasks");
	int numTasks = tasksNode.get(FIELD_NAME_TOTAL_NUMBER_TASKS).intValue();

	int[] numVerticesPerExecutionState = new int[ExecutionState.values().length];

	for (ExecutionState executionState : ExecutionState.values()) {
		numVerticesPerExecutionState[executionState.ordinal()] = tasksNode.get(executionState.name().toLowerCase()).intValue();
	}

	return new JobDetails(
		jobId,
		jobName,
		startTime,
		endTime,
		duration,
		jobStatus,
		lastUpdateTime,
		numVerticesPerExecutionState,
		numTasks);
}
 
Example 10
Source File: WebFrontendITCase.java    From flink with Apache License 2.0
@Test
public void getNumberOfTaskManagers() throws Exception {
	String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

	ObjectMapper mapper = new ObjectMapper();
	JsonNode response = mapper.readTree(json);
	ArrayNode taskManagers = (ArrayNode) response.get("taskmanagers");

	assertNotNull(taskManagers);
	assertEquals(NUM_TASK_MANAGERS, taskManagers.size());
}
 
Example 11
Source File: JsonRowSchemaConverter.java    From flink with Apache License 2.0
private static TypeInformation<Row> convertObject(String location, JsonNode node, JsonNode root) {
	// validate properties
	if (!node.has(PROPERTIES)) {
		return Types.ROW();
	}
	if (!node.isObject()) {
		throw new IllegalArgumentException(
			"Invalid '" + PROPERTIES + "' property for object type in node: " + location);
	}
	final JsonNode props = node.get(PROPERTIES);
	final String[] names = new String[props.size()];
	final TypeInformation<?>[] types = new TypeInformation[props.size()];

	final Iterator<Map.Entry<String, JsonNode>> fieldIter = props.fields();
	int i = 0;
	while (fieldIter.hasNext()) {
		final Map.Entry<String, JsonNode> subNode = fieldIter.next();

		// set field name
		names[i] = subNode.getKey();

		// set type
		types[i] = convertType(location + '/' + subNode.getKey(), subNode.getValue(), root);

		i++;
	}

	// validate that object does not contain additional properties
	if (node.has(ADDITIONAL_PROPERTIES) && node.get(ADDITIONAL_PROPERTIES).isBoolean() &&
			node.get(ADDITIONAL_PROPERTIES).asBoolean()) {
		throw new IllegalArgumentException(
			"An object must not allow additional properties in node: " + location);
	}

	return Types.ROW_NAMED(names, types);
}
 
Example 12
Source File: JsonRowSchemaConverter.java    From Flink-CEPplus with Apache License 2.0
private static TypeInformation<?> convertArray(String location, JsonNode node, JsonNode root) {
	// validate items
	if (!node.has(ITEMS)) {
		throw new IllegalArgumentException(
			"Arrays must specify an '" + ITEMS + "' property in node: " + location);
	}
	final JsonNode items = node.get(ITEMS);

	// list (translated to object array)
	if (items.isObject()) {
		final TypeInformation<?> elementType = convertType(
			location + '/' + ITEMS,
			items,
			root);
		// result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings
		return Types.OBJECT_ARRAY(elementType);
	}
	// tuple (translated to row)
	else if (items.isArray()) {
		final TypeInformation<?>[] types = convertTypes(location + '/' + ITEMS, items, root);

		// validate that array does not contain additional items
		if (node.has(ADDITIONAL_ITEMS) && node.get(ADDITIONAL_ITEMS).isBoolean() &&
				node.get(ADDITIONAL_ITEMS).asBoolean()) {
			throw new IllegalArgumentException(
				"An array tuple must not allow additional items in node: " + location);
		}

		return Types.ROW(types);
	}
	throw new IllegalArgumentException(
		"Invalid type for '" + ITEMS + "' property in node: " + location);
}
 
Example 13
Source File: JsonGeneratorTest.java    From flink with Apache License 2.0
@Test
public void testGeneratorWithoutAnyAttachements() {
	try {
		JobVertex source1 = new JobVertex("source 1");
		
		JobVertex source2 = new JobVertex("source 2");
		source2.setInvokableClass(DummyInvokable.class);
		
		JobVertex source3 = new JobVertex("source 3");
		
		JobVertex intermediate1 = new JobVertex("intermediate 1");
		JobVertex intermediate2 = new JobVertex("intermediate 2");
		
		JobVertex join1 = new JobVertex("join 1");
		JobVertex join2 = new JobVertex("join 2");

		JobVertex sink1 = new JobVertex("sink 1");
		JobVertex sink2 = new JobVertex("sink 2");
		
		intermediate1.connectNewDataSetAsInput(source1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);
		intermediate2.connectNewDataSetAsInput(source2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
		
		join1.connectNewDataSetAsInput(intermediate1, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING);
		join1.connectNewDataSetAsInput(intermediate2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING);

		join2.connectNewDataSetAsInput(join1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);
		join2.connectNewDataSetAsInput(source3, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING);
		
		sink1.connectNewDataSetAsInput(join2, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);
		sink2.connectNewDataSetAsInput(join1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);

		JobGraph jg = new JobGraph("my job", source1, source2, source3,
				intermediate1, intermediate2, join1, join2, sink1, sink2);
		
		String plan = JsonPlanGenerator.generatePlan(jg);
		assertNotNull(plan);

		// validate the produced JSON
		ObjectMapper m = new ObjectMapper();
		JsonNode rootNode = m.readTree(plan);
		
		// core fields
		assertEquals(new TextNode(jg.getJobID().toString()), rootNode.get("jid"));
		assertEquals(new TextNode(jg.getName()), rootNode.get("name"));
		
		assertTrue(rootNode.path("nodes").isArray());
		
		for (Iterator<JsonNode> iter = rootNode.path("nodes").elements(); iter.hasNext(); ) {
			JsonNode next = iter.next();
			
			JsonNode idNode = next.get("id");
			assertNotNull(idNode);
			assertTrue(idNode.isTextual());
			checkVertexExists(idNode.asText(), jg);
			
			String description = next.get("description").asText();
			assertTrue(
					description.startsWith("source") ||
					description.startsWith("sink") ||
					description.startsWith("intermediate") ||
					description.startsWith("join"));
		}
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example 14
Source File: JsonJobGraphGenerationTest.java    From flink with Apache License 2.0
@Override
public void validateJson(String json) throws Exception {
	final Map<String, JsonNode> idToNode = new HashMap<>();

	// validate the produced JSON
	ObjectMapper m = new ObjectMapper();
	JsonNode rootNode = m.readTree(json);

	JsonNode idField = rootNode.get("jid");
	JsonNode nameField = rootNode.get("name");
	JsonNode arrayField = rootNode.get("nodes");

	assertNotNull(idField);
	assertNotNull(nameField);
	assertNotNull(arrayField);
	assertTrue(idField.isTextual());
	assertTrue(nameField.isTextual());
	assertTrue(arrayField.isArray());

	ArrayNode array = (ArrayNode) arrayField;
	Iterator<JsonNode> iter = array.elements();
	while (iter.hasNext()) {
		JsonNode vertex = iter.next();

		JsonNode vertexIdField = vertex.get("id");
		JsonNode parallelismField = vertex.get("parallelism");
		JsonNode contentsFields = vertex.get("description");
		JsonNode operatorField = vertex.get("operator");

		assertNotNull(vertexIdField);
		assertTrue(vertexIdField.isTextual());
		assertNotNull(parallelismField);
		assertTrue(parallelismField.isNumber());
		assertNotNull(contentsFields);
		assertTrue(contentsFields.isTextual());
		assertNotNull(operatorField);
		assertTrue(operatorField.isTextual());

		if (contentsFields.asText().startsWith("Sync")) {
			assertEquals(1, parallelismField.asInt());
		}
		else {
			assertEquals(expectedParallelism, parallelismField.asInt());
		}

		idToNode.put(vertexIdField.asText(), vertex);
	}

	assertEquals(numNodes, idToNode.size());

	// check that all inputs are contained
	for (JsonNode node : idToNode.values()) {
		JsonNode inputsField = node.get("inputs");
		if (inputsField != null) {
			Iterator<JsonNode> inputsIter = inputsField.elements();
			while (inputsIter.hasNext()) {
				JsonNode inputNode = inputsIter.next();
				JsonNode inputIdField = inputNode.get("id");

				assertNotNull(inputIdField);
				assertTrue(inputIdField.isTextual());

				String inputIdString = inputIdField.asText();
				assertTrue(idToNode.containsKey(inputIdString));
			}
		}
	}
}
 
Example 15
Source File: HistoryServerArchiveFetcher.java    From flink with Apache License 2.0
private static String convertLegacyJobOverview(String legacyOverview) throws IOException {
	JsonNode root = mapper.readTree(legacyOverview);
	JsonNode finishedJobs = root.get("finished");
	JsonNode job = finishedJobs.get(0);

	JobID jobId = JobID.fromHexString(job.get("jid").asText());
	String name = job.get("name").asText();
	JobStatus state = JobStatus.valueOf(job.get("state").asText());

	long startTime = job.get("start-time").asLong();
	long endTime = job.get("end-time").asLong();
	long duration = job.get("duration").asLong();
	long lastMod = job.get("last-modification").asLong();

	JsonNode tasks = job.get("tasks");
	int numTasks = tasks.get("total").asInt();
	JsonNode pendingNode = tasks.get("pending");
	// Flink versions < 1.4 expose a 'pending' field; in versions >= 1.4 it has
	// been split into 'scheduled', 'deploying', and 'created'.
	boolean versionLessThan14 = pendingNode != null;
	int created = 0;
	int scheduled;
	int deploying = 0;

	if (versionLessThan14) {
		// pending is a mix of CREATED/SCHEDULED/DEPLOYING
		// to maintain the correct number of task states we pick SCHEDULED
		scheduled = pendingNode.asInt();
	} else {
		created = tasks.get("created").asInt();
		scheduled = tasks.get("scheduled").asInt();
		deploying = tasks.get("deploying").asInt();
	}
	int running = tasks.get("running").asInt();
	int finished = tasks.get("finished").asInt();
	int canceling = tasks.get("canceling").asInt();
	int canceled = tasks.get("canceled").asInt();
	int failed = tasks.get("failed").asInt();

	int[] tasksPerState = new int[ExecutionState.values().length];
	tasksPerState[ExecutionState.CREATED.ordinal()] = created;
	tasksPerState[ExecutionState.SCHEDULED.ordinal()] = scheduled;
	tasksPerState[ExecutionState.DEPLOYING.ordinal()] = deploying;
	tasksPerState[ExecutionState.RUNNING.ordinal()] = running;
	tasksPerState[ExecutionState.FINISHED.ordinal()] = finished;
	tasksPerState[ExecutionState.CANCELING.ordinal()] = canceling;
	tasksPerState[ExecutionState.CANCELED.ordinal()] = canceled;
	tasksPerState[ExecutionState.FAILED.ordinal()] = failed;

	JobDetails jobDetails = new JobDetails(jobId, name, startTime, endTime, duration, state, lastMod, tasksPerState, numTasks);
	MultipleJobsDetails multipleJobsDetails = new MultipleJobsDetails(Collections.singleton(jobDetails));

	StringWriter sw = new StringWriter();
	mapper.writeValue(sw, multipleJobsDetails);
	return sw.toString();
}
 
Example 16
Source File: HistoryServerArchiveFetcher.java    From Flink-CEPplus with Apache License 2.0
private static String convertLegacyJobOverview(String legacyOverview) throws IOException {
	JsonNode root = mapper.readTree(legacyOverview);
	JsonNode finishedJobs = root.get("finished");
	JsonNode job = finishedJobs.get(0);

	JobID jobId = JobID.fromHexString(job.get("jid").asText());
	String name = job.get("name").asText();
	JobStatus state = JobStatus.valueOf(job.get("state").asText());

	long startTime = job.get("start-time").asLong();
	long endTime = job.get("end-time").asLong();
	long duration = job.get("duration").asLong();
	long lastMod = job.get("last-modification").asLong();

	JsonNode tasks = job.get("tasks");
	int numTasks = tasks.get("total").asInt();
	JsonNode pendingNode = tasks.get("pending");
	// Flink versions < 1.4 expose a 'pending' field; in versions >= 1.4 it has
	// been split into 'scheduled', 'deploying', and 'created'.
	boolean versionLessThan14 = pendingNode != null;
	int created = 0;
	int scheduled;
	int deploying = 0;

	if (versionLessThan14) {
		// pending is a mix of CREATED/SCHEDULED/DEPLOYING
		// to maintain the correct number of task states we pick SCHEDULED
		scheduled = pendingNode.asInt();
	} else {
		created = tasks.get("created").asInt();
		scheduled = tasks.get("scheduled").asInt();
		deploying = tasks.get("deploying").asInt();
	}
	int running = tasks.get("running").asInt();
	int finished = tasks.get("finished").asInt();
	int canceling = tasks.get("canceling").asInt();
	int canceled = tasks.get("canceled").asInt();
	int failed = tasks.get("failed").asInt();

	int[] tasksPerState = new int[ExecutionState.values().length];
	tasksPerState[ExecutionState.CREATED.ordinal()] = created;
	tasksPerState[ExecutionState.SCHEDULED.ordinal()] = scheduled;
	tasksPerState[ExecutionState.DEPLOYING.ordinal()] = deploying;
	tasksPerState[ExecutionState.RUNNING.ordinal()] = running;
	tasksPerState[ExecutionState.FINISHED.ordinal()] = finished;
	tasksPerState[ExecutionState.CANCELING.ordinal()] = canceling;
	tasksPerState[ExecutionState.CANCELED.ordinal()] = canceled;
	tasksPerState[ExecutionState.FAILED.ordinal()] = failed;

	JobDetails jobDetails = new JobDetails(jobId, name, startTime, endTime, duration, state, lastMod, tasksPerState, numTasks);
	MultipleJobsDetails multipleJobsDetails = new MultipleJobsDetails(Collections.singleton(jobDetails));

	StringWriter sw = new StringWriter();
	mapper.writeValue(sw, multipleJobsDetails);
	return sw.toString();
}
 
Example 17
Source File: JsonJobGraphGenerationTest.java    From Flink-CEPplus with Apache License 2.0
@Override
public void validateJson(String json) throws Exception {
	final Map<String, JsonNode> idToNode = new HashMap<>();

	// validate the produced JSON
	ObjectMapper m = new ObjectMapper();
	JsonNode rootNode = m.readTree(json);

	JsonNode idField = rootNode.get("jid");
	JsonNode nameField = rootNode.get("name");
	JsonNode arrayField = rootNode.get("nodes");

	assertNotNull(idField);
	assertNotNull(nameField);
	assertNotNull(arrayField);
	assertTrue(idField.isTextual());
	assertTrue(nameField.isTextual());
	assertTrue(arrayField.isArray());

	ArrayNode array = (ArrayNode) arrayField;
	Iterator<JsonNode> iter = array.elements();
	while (iter.hasNext()) {
		JsonNode vertex = iter.next();

		JsonNode vertexIdField = vertex.get("id");
		JsonNode parallelismField = vertex.get("parallelism");
		JsonNode contentsFields = vertex.get("description");
		JsonNode operatorField = vertex.get("operator");

		assertNotNull(vertexIdField);
		assertTrue(vertexIdField.isTextual());
		assertNotNull(parallelismField);
		assertTrue(parallelismField.isNumber());
		assertNotNull(contentsFields);
		assertTrue(contentsFields.isTextual());
		assertNotNull(operatorField);
		assertTrue(operatorField.isTextual());

		if (contentsFields.asText().startsWith("Sync")) {
			assertEquals(1, parallelismField.asInt());
		}
		else {
			assertEquals(expectedParallelism, parallelismField.asInt());
		}

		idToNode.put(vertexIdField.asText(), vertex);
	}

	assertEquals(numNodes, idToNode.size());

	// check that all inputs are contained
	for (JsonNode node : idToNode.values()) {
		JsonNode inputsField = node.get("inputs");
		if (inputsField != null) {
			Iterator<JsonNode> inputsIter = inputsField.elements();
			while (inputsIter.hasNext()) {
				JsonNode inputNode = inputsIter.next();
				JsonNode inputIdField = inputNode.get("id");

				assertNotNull(inputIdField);
				assertTrue(inputIdField.isTextual());

				String inputIdString = inputIdField.asText();
				assertTrue(idToNode.containsKey(inputIdString));
			}
		}
	}
}
 