org.apache.flink.util.WrappingRuntimeException Java Examples

The following examples show how to use org.apache.flink.util.WrappingRuntimeException. They are taken from open source projects; each example notes its source file, the project it comes from, and the license.
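WrappingRuntimeException is an unchecked wrapper (a RuntimeException) that lets a checked exception escape from contexts whose signatures do not allow one, such as lambdas passed to computeIfAbsent, mailbox actions, or serialization callbacks, while keeping the original exception reachable as the cause. Before the examples, here is a minimal, self-contained sketch of that basic usage; the helper doRiskyIo and the surrounding class are hypothetical, while the (message, cause) constructor and the static wrapIfNecessary factory are the same ones used in the examples below.

import java.io.IOException;

import org.apache.flink.util.WrappingRuntimeException;

public class WrappingRuntimeExceptionSketch {

	// Hypothetical helper that declares a checked exception.
	private static void doRiskyIo() throws IOException {
		throw new IOException("boom");
	}

	public static void main(String[] args) {
		// Runnable.run() cannot declare IOException, so the lambda wraps it.
		Runnable task = () -> {
			try {
				doRiskyIo();
			} catch (IOException e) {
				// Alternatively: throw WrappingRuntimeException.wrapIfNecessary(e);
				throw new WrappingRuntimeException("I/O failed", e);
			}
		};

		try {
			task.run();
		} catch (WrappingRuntimeException e) {
			// The original checked exception is still reachable as the cause.
			System.out.println("caused by: " + e.getCause());
		}
	}
}
 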
Example #1
Source File: SharedBufferAccessor.java    From Flink-CEPplus with Apache License 2.0
/**
 * Extracts the real events from the shared buffer using the pre-extracted event ids.
 *
 * @param match map from pattern name to the ids of the matched events.
 * @return map from pattern name to the materialized events.
 */
public Map<String, List<V>> materializeMatch(Map<String, List<EventId>> match) {
	Map<String, List<V>> materializedMatch = new LinkedHashMap<>(match.size());

	for (Map.Entry<String, List<EventId>> pattern : match.entrySet()) {
		List<V> events = new ArrayList<>(pattern.getValue().size());
		for (EventId eventId : pattern.getValue()) {
			try {
				V event = sharedBuffer.getEvent(eventId).getElement();
				events.add(event);
			} catch (Exception ex) {
				throw new WrappingRuntimeException(ex);
			}
		}
		materializedMatch.put(pattern.getKey(), events);
	}

	return materializedMatch;
}
 
Example #2
Source File: MailboxProcessor.java    From flink with Apache License 2.0
/**
 * Reports a throwable for rethrowing from the mailbox thread. This will clear and cancel all other pending mails.
 * @param throwable to report by rethrowing from the mailbox loop.
 */
public void reportThrowable(Throwable throwable) {
	sendControlMail(
		() -> {
			if (throwable instanceof Exception) {
				throw (Exception) throwable;
			}
			else if (throwable instanceof Error) {
				throw (Error) throwable;
			}
			else {
				throw WrappingRuntimeException.wrapIfNecessary(throwable);
			}
		},
		"Report throwable %s", throwable);
}
 
Example #3
Source File: SharedBufferAccessor.java    From flink with Apache License 2.0
/**
 * Extracts the real events from the shared buffer using the pre-extracted event ids.
 *
 * @param match map from pattern name to the ids of the matched events.
 * @return map from pattern name to the materialized events.
 */
public Map<String, List<V>> materializeMatch(Map<String, List<EventId>> match) {
	Map<String, List<V>> materializedMatch = new LinkedHashMap<>(match.size());

	for (Map.Entry<String, List<EventId>> pattern : match.entrySet()) {
		List<V> events = new ArrayList<>(pattern.getValue().size());
		for (EventId eventId : pattern.getValue()) {
			try {
				V event = sharedBuffer.getEvent(eventId).getElement();
				events.add(event);
			} catch (Exception ex) {
				throw new WrappingRuntimeException(ex);
			}
		}
		materializedMatch.put(pattern.getKey(), events);
	}

	return materializedMatch;
}
 
Example #4
Source File: RegistryAvroSerializationSchema.java    From flink with Apache License 2.0
@Override
public byte[] serialize(T object) {
	checkAvroInitialized();

	if (object == null) {
		return null;
	} else {
		try {
			ByteArrayOutputStream outputStream = getOutputStream();
			outputStream.reset();
			Encoder encoder = getEncoder();
			schemaCoder.writeSchema(getSchema(), outputStream);
			getDatumWriter().write(object, encoder);
			encoder.flush();
			return outputStream.toByteArray();
		} catch (IOException e) {
			throw new WrappingRuntimeException("Failed to serialize schema registry.", e);
		}
	}
}
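A caller of serialize() that wants to react specifically to the underlying I/O problem can dig the original IOException back out of the wrapper. The fragment below is a hypothetical caller, not part of the schema class; 'schema' and 'record' are assumed to exist, and it relies on org.apache.flink.util.ExceptionUtils.findThrowable, which searches a cause chain for a given exception type.

// Hypothetical caller; 'schema' is a configured serialization schema and
// 'record' is the element being serialized (both assumed).
try {
	byte[] payload = schema.serialize(record);
	// ... forward payload to a producer or sink ...
} catch (WrappingRuntimeException e) {
	// Search the cause chain for the original checked exception.
	Optional<IOException> ioFailure = ExceptionUtils.findThrowable(e, IOException.class);
	if (ioFailure.isPresent()) {
		// e.g. log and retry, or fail with a clearer message
		throw new RuntimeException("Avro serialization failed due to an I/O error", ioFailure.get());
	}
	throw e;
}
 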
 
Example #5
Source File: AvroSerializationSchema.java    From flink with Apache License 2.0
@Override
public byte[] serialize(T object) {
	checkAvroInitialized();

	if (object == null) {
		return null;
	} else {
		try {
			datumWriter.write(object, encoder);
			encoder.flush();
			byte[] bytes = arrayOutputStream.toByteArray();
			arrayOutputStream.reset();
			return bytes;
		} catch (IOException e) {
			throw new WrappingRuntimeException("Failed to serialize schema registry.", e);
		}

	}
}
 
Example #6
Source File: SharedBuffer.java    From Flink-CEPplus with Apache License 2.0
/**
 * Returns the node with the given id, either from state or from the cache.
 *
 * @param nodeId id of the node
 * @return SharedBufferNode
 */
Lockable<SharedBufferNode> getEntry(NodeId nodeId) {
	return entryCache.computeIfAbsent(nodeId, id -> {
		try {
			return entries.get(id);
		} catch (Exception ex) {
			throw new WrappingRuntimeException(ex);
		}
	});
}
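The wrapping here is forced by the lambda: the function handed to computeIfAbsent cannot declare the checked exception that the state access may throw, so it is rethrown unchecked and can be turned back into a checked exception further up the call stack. Below is a minimal, generic sketch of that round trip outside Flink; the cache field and the loadFromState helper are hypothetical, only WrappingRuntimeException itself is real.

private final Map<String, String> cache = new HashMap<>();

String getCached(String key) throws Exception {
	try {
		return cache.computeIfAbsent(key, k -> {
			try {
				return loadFromState(k);                 // declares a checked exception
			} catch (Exception ex) {
				throw new WrappingRuntimeException(ex);  // the lambda cannot throw it directly
			}
		});
	} catch (WrappingRuntimeException e) {
		// Outside the lambda, surface the original checked exception again.
		throw (Exception) e.getCause();
	}
}

// Placeholder for a state-backend lookup that may fail with a checked exception.
private String loadFromState(String key) throws Exception {
	return "value-for-" + key;
}
 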
 
Example #7
Source File: MailboxExecutorImpl.java    From flink with Apache License 2.0
@Override
public boolean tryYield() {
	Optional<Mail> optionalMail = mailbox.tryTake(priority);
	if (optionalMail.isPresent()) {
		try {
			optionalMail.get().run();
		} catch (Exception ex) {
			throw WrappingRuntimeException.wrapIfNecessary(ex);
		}
		return true;
	} else {
		return false;
	}
}
 
Example #8
Source File: MailboxExecutorImpl.java    From flink with Apache License 2.0
@Override
public void yield() throws InterruptedException {
	Mail mail = mailbox.take(priority);
	try {
		mail.run();
	} catch (Exception ex) {
		throw WrappingRuntimeException.wrapIfNecessary(ex);
	}
}
 
Example #9
Source File: StreamExecutionEnvironment.java    From flink with Apache License 2.0
private StateBackend loadStateBackend(ReadableConfig configuration, ClassLoader classLoader) {
	try {
		return StateBackendLoader.loadStateBackendFromConfig(
			configuration,
			classLoader,
			null);
	} catch (DynamicCodeLoadingException | IOException e) {
		throw new WrappingRuntimeException(e);
	}
}
 
Example #10
Source File: StreamExecutionEnvironment.java    From flink with Apache License 2.0
private void registerCustomListeners(final ClassLoader classLoader, final List<String> listeners) {
	for (String listener : listeners) {
		try {
			final JobListener jobListener = InstantiationUtil.instantiate(
					listener, JobListener.class, classLoader);
			jobListeners.add(jobListener);
		} catch (FlinkException e) {
			throw new WrappingRuntimeException("Could not load JobListener : " + listener, e);
		}
	}
}
 
Example #11
Source File: SharedBuffer.java    From flink with Apache License 2.0
/**
 * Returns the event with the given id, either from state or from the cache.
 *
 * @param eventId id of the event
 * @return event
 */
Lockable<V> getEvent(EventId eventId) {
	return eventsBufferCache.computeIfAbsent(eventId, id -> {
		try {
			return eventsBuffer.get(id);
		} catch (Exception ex) {
			throw new WrappingRuntimeException(ex);
		}
	});
}
 
Example #12
Source File: SharedBuffer.java    From flink with Apache License 2.0
/**
 * Returns the node with the given id, either from state or from the cache.
 *
 * @param nodeId id of the node
 * @return SharedBufferNode
 */
Lockable<SharedBufferNode> getEntry(NodeId nodeId) {
	return entryCache.computeIfAbsent(nodeId, id -> {
		try {
			return entries.get(id);
		} catch (Exception ex) {
			throw new WrappingRuntimeException(ex);
		}
	});
}
 
Example #13
Source File: ExecutionEnvironment.java    From flink with Apache License 2.0
private void registerCustomListeners(final ClassLoader classLoader, final List<String> listeners) {
	for (String listener : listeners) {
		try {
			final JobListener jobListener = InstantiationUtil.instantiate(
					listener, JobListener.class, classLoader);
			jobListeners.add(jobListener);
		} catch (FlinkException e) {
			throw new WrappingRuntimeException("Could not load JobListener : " + listener, e);
		}
	}
}
 
Example #14
Source File: JsonRowSerializationSchema.java    From flink with Apache License 2.0
private SerializationRuntimeConverter createFallbackConverter() {
	return (mapper, reuse, object) -> {
		// for types that were specified without JSON schema
		// e.g. POJOs
		try {
			return mapper.valueToTree(object);
		} catch (IllegalArgumentException e) {
			throw new WrappingRuntimeException(format("Could not convert object: %s", object), e);
		}
	};
}
 
Example #15
Source File: CollectAggFunction.java    From flink with Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public TypeInformation<CollectAccumulator<T>> getAccumulatorType() {
	try {
		Class<CollectAccumulator<T>> clazz = (Class<CollectAccumulator<T>>) (Class) CollectAccumulator.class;
		List<PojoField> pojoFields = new ArrayList<>();
		pojoFields.add(new PojoField(
			clazz.getDeclaredField("map"),
			new MapViewTypeInfo<>(elementType, BasicTypeInfo.INT_TYPE_INFO)));
		return new PojoTypeInfo<>(clazz, pojoFields);
	} catch (NoSuchFieldException e) {
		throw new WrappingRuntimeException(e);
	}
}
 
Example #16
Source File: LazyBinaryFormat.java    From flink with Apache License 2.0
/**
 * Ensure we have materialized binary format.
 */
public final void ensureMaterialized(TypeSerializer<T> serializer) {
	if (binarySection == null) {
		try {
			this.binarySection = materialize(serializer);
		} catch (IOException e) {
			throw new WrappingRuntimeException(e);
		}
	}
}
 
Example #17
Source File: JsonRowDeserializationSchema.java    From flink with Apache License 2.0
private DeserializationRuntimeConverter createFallbackConverter(Class<?> valueType) {
	return (mapper, jsonNode) -> {
		// for types that were specified without JSON schema
		// e.g. POJOs
		try {
			return mapper.treeToValue(jsonNode, valueType);
		} catch (JsonProcessingException e) {
			throw new WrappingRuntimeException(format("Could not convert node: %s", jsonNode), e);
		}
	};
}
 
Example #18
Source File: JsonRowDeserializationSchema.java    From flink with Apache License 2.0
private DeserializationRuntimeConverter createByteArrayConverter() {
	return (mapper, jsonNode) -> {
		try {
			return jsonNode.binaryValue();
		} catch (IOException e) {
			throw new WrappingRuntimeException("Unable to deserialize byte array.", e);
		}
	};
}
 
Example #19
Source File: SharedBuffer.java    From Flink-CEPplus with Apache License 2.0
/**
 * Returns the event with the given id, either from state or from the cache.
 *
 * @param eventId id of the event
 * @return event
 */
Lockable<V> getEvent(EventId eventId) {
	return eventsBufferCache.computeIfAbsent(eventId, id -> {
		try {
			return eventsBuffer.get(id);
		} catch (Exception ex) {
			throw new WrappingRuntimeException(ex);
		}
	});
}
 
Example #20
Source File: SimpleCatalogFactory.java    From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
	String database = properties.getOrDefault(
		CatalogDescriptorValidator.CATALOG_DEFAULT_DATABASE,
		"default_database");
	GenericInMemoryCatalog genericInMemoryCatalog = new GenericInMemoryCatalog(name, database);

	String tableName = properties.getOrDefault(TEST_TABLE_NAME, TEST_TABLE_NAME);
	StreamTableSource<Row> tableSource = new StreamTableSource<Row>() {
		@Override
		public DataStream<Row> getDataStream(StreamExecutionEnvironment execEnv) {
			return execEnv.fromCollection(TABLE_CONTENTS)
				.returns(new RowTypeInfo(
					new TypeInformation[]{Types.INT(), Types.STRING()},
					new String[]{"id", "string"}));
		}

		@Override
		public TableSchema getTableSchema() {
			return TableSchema.builder()
				.field("id", DataTypes.INT())
				.field("string", DataTypes.STRING())
				.build();
		}

		@Override
		public DataType getProducedDataType() {
			return DataTypes.ROW(
				DataTypes.FIELD("id", DataTypes.INT()),
				DataTypes.FIELD("string", DataTypes.STRING())
			);
		}
	};

	try {
		genericInMemoryCatalog.createTable(
			new ObjectPath(database, tableName),
			ConnectorCatalogTable.source(tableSource, false),
			false
		);
	} catch (Exception e) {
		throw new WrappingRuntimeException(e);
	}

	return genericInMemoryCatalog;
}
 
Example #21
Source File: MailboxProcessor.java    From flink with Apache License 2.0
/**
 * Reports a throwable for rethrowing from the mailbox thread. This will clear and cancel all other pending letters.
 * @param throwable to report by rethrowing from the mailbox loop.
 */
public void reportThrowable(Throwable throwable) {
	sendPriorityLetter(() -> {
		throw new WrappingRuntimeException(throwable);
	});
}
 