Java Code Examples for org.apache.flink.util.InstantiationUtil
The following examples show how to use org.apache.flink.util.InstantiationUtil. These examples are extracted from open source projects; each one lists its source project, author, file, and license.
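As a quick orientation before the individual examples, the sketch below exercises the InstantiationUtil calls that recur throughout this page: reflective instantiation, Java-serialization round trips, and serialization-based cloning. This is a minimal sketch rather than code from any project below; the class name InstantiationUtilSketch and the use of ArrayList are illustrative assumptions.

import org.apache.flink.util.InstantiationUtil;

import java.util.ArrayList;

public class InstantiationUtilSketch {

    public static void main(String[] args) throws Exception {
        // Reflective instantiation through the public no-argument constructor.
        @SuppressWarnings("unchecked")
        ArrayList<String> list = InstantiationUtil.instantiate(ArrayList.class);
        list.add("hello");

        // Round trip through Java serialization: serializeObject returns a byte[],
        // and deserializeObject needs a ClassLoader to resolve the classes it reads.
        byte[] bytes = InstantiationUtil.serializeObject(list);
        ArrayList<String> restored = InstantiationUtil.deserializeObject(
                bytes, Thread.currentThread().getContextClassLoader());

        // clone(...) is a convenience wrapper around the same round trip.
        ArrayList<String> copy = InstantiationUtil.clone(list);

        System.out.println(restored.equals(copy)); // prints: true
    }
}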
Example #1
Source Project: Flink-CEPplus Author: ljygz File: KafkaShortRetentionTestBase.java License: Apache License 2.0
@BeforeClass
public static void prepare() throws ClassNotFoundException {
    LOG.info("-------------------------------------------------------------------------");
    LOG.info(" Starting KafkaShortRetentionTestBase ");
    LOG.info("-------------------------------------------------------------------------");

    // dynamically load the implementation for the test
    Class<?> clazz = Class.forName("org.apache.flink.streaming.connectors.kafka.KafkaTestEnvironmentImpl");
    kafkaServer = (KafkaTestEnvironment) InstantiationUtil.instantiate(clazz);

    LOG.info("Starting KafkaTestBase.prepare() for Kafka " + kafkaServer.getVersion());

    if (kafkaServer.isSecureRunSupported()) {
        secureProps = kafkaServer.getSecureProperties();
    }

    Properties specificProperties = new Properties();
    specificProperties.setProperty("log.retention.hours", "0");
    specificProperties.setProperty("log.retention.minutes", "0");
    specificProperties.setProperty("log.retention.ms", "250");
    specificProperties.setProperty("log.retention.check.interval.ms", "100");
    kafkaServer.prepare(kafkaServer.createConfig().setKafkaServerProperties(specificProperties));

    standardProps = kafkaServer.getStandardProperties();
}
Example #2
Source Project: Flink-CEPplus Author: ljygz File: JobManagerWatermarkTracker.java License: Apache License 2.0
@Override
public Map<String, WatermarkState> add(byte[] valueBytes, Map<String, WatermarkState> accumulator) {
    addCount++;

    final WatermarkUpdate value;
    try {
        value = InstantiationUtil.deserializeObject(valueBytes, this.getClass().getClassLoader());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    WatermarkState ws = accumulator.get(value.id);
    if (ws == null) {
        accumulator.put(value.id, ws = new WatermarkState());
    }
    ws.watermark = value.watermark;
    ws.lastUpdated = System.currentTimeMillis();
    return accumulator;
}
Example #3
Source Project: flink Author: apache File: KryoRegistrationSerializerConfigSnapshot.java License: Apache License 2.0
@Override
public void write(DataOutputView out) throws IOException {
    out.writeUTF(kryoRegistration.getRegisteredClass().getName());

    final KryoRegistration.SerializerDefinitionType serializerDefinitionType =
        kryoRegistration.getSerializerDefinitionType();

    out.writeInt(serializerDefinitionType.ordinal());
    switch (serializerDefinitionType) {
        case UNSPECIFIED:
            // nothing else to write
            break;

        case CLASS:
            out.writeUTF(kryoRegistration.getSerializerClass().getName());
            break;

        case INSTANCE:
            try (final DataOutputViewStream outViewWrapper = new DataOutputViewStream(out)) {
                InstantiationUtil.serializeObject(outViewWrapper, kryoRegistration.getSerializableSerializerInstance());
            }
            break;

        default:
            // this should not happen; adding as a guard for the future
            throw new IllegalStateException(
                "Unrecognized Kryo registration serializer definition type: " + serializerDefinitionType);
    }
}
Example #4
Source Project: flink Author: apache File: StatefulComplexPayloadSerializer.java License: Apache License 2.0
@Override
public ComplexPayload copy(ComplexPayload from) {
    try {
        Thread currentThread = Thread.currentThread();
        if (currentOwnerThread.compareAndSet(null, currentThread)) {
            return InstantiationUtil.deserializeObject(
                InstantiationUtil.serializeObject(from), currentThread.getContextClassLoader());
        } else {
            throw new IllegalStateException("Concurrent access to type serializer detected!");
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        currentOwnerThread.set(null);
    }
}
Example #5
Source Project: flink Author: apache File: PojoSerializerSnapshotData.java License: Apache License 2.0
private static <T> PojoSerializerSnapshotData<T> readSnapshotData(DataInputView in, ClassLoader userCodeClassLoader) throws IOException {
    Class<T> pojoClass = InstantiationUtil.resolveClassByName(in, userCodeClassLoader);

    LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> fieldSerializerSnapshots = readOptionalMap(
        in, fieldReader(userCodeClassLoader), snapshotReader(userCodeClassLoader));
    LinkedOptionalMap<Class<?>, TypeSerializerSnapshot<?>> registeredSubclassSerializerSnapshots = readOptionalMap(
        in, classReader(userCodeClassLoader), snapshotReader(userCodeClassLoader));
    LinkedOptionalMap<Class<?>, TypeSerializerSnapshot<?>> nonRegisteredSubclassSerializerSnapshots = readOptionalMap(
        in, classReader(userCodeClassLoader), snapshotReader(userCodeClassLoader));

    return new PojoSerializerSnapshotData<>(
        pojoClass, fieldSerializerSnapshots, registeredSubclassSerializerSnapshots, nonRegisteredSubclassSerializerSnapshots);
}
Example #6
Source Project: flink-dataflow Author: dataArtisans File: FlinkStateInternals.java License: Apache License 2.0
@Override
public void persistState(StateCheckpointWriter checkpointBuilder) throws IOException {
    if (!contents.isEmpty()) {
        // serialize the coder.
        byte[] coder = InstantiationUtil.serializeObject(elemCoder);

        checkpointBuilder.addListUpdatesBuilder()
            .setTag(stateKey)
            .setData(coder)
            .writeInt(contents.size());

        for (T item : contents) {
            // encode the element
            ByteString.Output stream = ByteString.newOutput();
            elemCoder.encode(item, stream, Coder.Context.OUTER);
            ByteString data = stream.toByteString();

            // add the data to the checkpoint.
            checkpointBuilder.setData(data);
        }
    }
}
Example #7
Source Project: Flink-CEPplus Author: ljygz File: TypeExtractor.java License: Apache License 2.0
/**
 * Returns the type information factory for a type using the factory registry or annotations.
 */
@Internal
public static <OUT> TypeInfoFactory<OUT> getTypeInfoFactory(Type t) {
    final Class<?> factoryClass;
    if (registeredTypeInfoFactories.containsKey(t)) {
        factoryClass = registeredTypeInfoFactories.get(t);
    } else {
        if (!isClassType(t) || !typeToClass(t).isAnnotationPresent(TypeInfo.class)) {
            return null;
        }
        final TypeInfo typeInfoAnnotation = typeToClass(t).getAnnotation(TypeInfo.class);
        factoryClass = typeInfoAnnotation.value();
        // check for valid factory class
        if (!TypeInfoFactory.class.isAssignableFrom(factoryClass)) {
            throw new InvalidTypesException("TypeInfo annotation does not specify a valid TypeInfoFactory.");
        }
    }

    // instantiate
    return (TypeInfoFactory<OUT>) InstantiationUtil.instantiate(factoryClass);
}
Example #8
Source Project: flink Author: flink-tpc-ds File: TaskEventTest.java License: Apache License 2.0
/**
 * This test checks the serialization/deserialization of {@link IntegerTaskEvent} objects.
 */
@Test
public void testIntegerTaskEvent() {
    try {
        final IntegerTaskEvent orig = new IntegerTaskEvent(11);
        final IntegerTaskEvent copy = InstantiationUtil.createCopyWritable(orig);

        assertEquals(orig.getInteger(), copy.getInteger());
        assertEquals(orig.hashCode(), copy.hashCode());
        assertTrue(orig.equals(copy));
    } catch (IOException ioe) {
        fail(ioe.getMessage());
    }
}
Example #9
Source Project: flink Author: apache File: AbstractIterativeTask.java License: Apache License 2.0
private void reinstantiateDriver() throws Exception {
    if (this.driver instanceof ResettableDriver) {
        final ResettableDriver<?, ?> resDriver = (ResettableDriver<?, ?>) this.driver;
        resDriver.reset();
    } else {
        Class<? extends Driver<S, OT>> driverClass = this.config.getDriver();
        this.driver = InstantiationUtil.instantiate(driverClass, Driver.class);

        try {
            this.driver.setup(this);
        } catch (Throwable t) {
            throw new Exception("The pact driver setup for '" + this.getEnvironment().getTaskInfo().getTaskName() +
                "' , caused an error: " + t.getMessage(), t);
        }
    }
}
Example #10
Source Project: flink Author: flink-tpc-ds File: TypeSerializerSerializationUtil.java License: Apache License 2.0
@SuppressWarnings("unchecked")
@Override
public void read(DataInputView in) throws IOException {
    super.read(in);

    // read in a way that allows the stream to recover from exceptions
    int serializerBytes = in.readInt();
    byte[] buffer = new byte[serializerBytes];
    in.readFully(buffer);

    ClassLoader previousClassLoader = Thread.currentThread().getContextClassLoader();
    try (InstantiationUtil.FailureTolerantObjectInputStream ois =
            new InstantiationUtil.FailureTolerantObjectInputStream(new ByteArrayInputStream(buffer), userClassLoader)) {

        Thread.currentThread().setContextClassLoader(userClassLoader);
        typeSerializer = (TypeSerializer<T>) ois.readObject();
    } catch (Exception e) {
        throw new UnloadableTypeSerializerException(e, buffer);
    } finally {
        Thread.currentThread().setContextClassLoader(previousClassLoader);
    }
}
Example #11
Source Project: Alink Author: alibaba File: GenericCsvInputFormat.java License: Apache License 2.0
private void initializeParsers() {
    Class<?>[] fieldClasses = extractTypeClasses(fieldTypes);

    // instantiate the parsers
    FieldParser<?>[] parsers = new FieldParser<?>[fieldClasses.length];

    for (int i = 0; i < fieldClasses.length; i++) {
        if (fieldClasses[i] != null) {
            Class<? extends FieldParser<?>> parserType = FieldParser.getParserForType(fieldClasses[i]);
            if (parserType == null) {
                throw new RuntimeException("No parser available for type '" + fieldClasses[i].getName() + "'.");
            }
            FieldParser<?> p = InstantiationUtil.instantiate(parserType, FieldParser.class);
            p.setCharset(charset);
            parsers[i] = p;
        }
    }
    this.fieldParsers = parsers;
    this.holders = new Object[fieldTypes.length];
    for (int i = 0; i < fieldTypes.length; i++) {
        holders[i] = fieldParsers[i].createValue();
    }
}
Example #12
Source Project: Flink-CEPplus Author: ljygz File: Optimizer.java License: Apache License 2.0
private OptimizerPostPass getPostPassFromPlan(Plan program) {
    final String className = program.getPostPassClassName();
    if (className == null) {
        throw new CompilerException("Optimizer Post Pass class description is null");
    }
    try {
        Class<? extends OptimizerPostPass> clazz = Class.forName(className).asSubclass(OptimizerPostPass.class);
        try {
            return InstantiationUtil.instantiate(clazz, OptimizerPostPass.class);
        } catch (RuntimeException rtex) {
            // unwrap the source exception
            if (rtex.getCause() != null) {
                throw new CompilerException("Cannot instantiate optimizer post pass: " + rtex.getMessage(), rtex.getCause());
            } else {
                throw rtex;
            }
        }
    } catch (ClassNotFoundException cnfex) {
        throw new CompilerException("Cannot load Optimizer post-pass class '" + className + "'.", cnfex);
    } catch (ClassCastException ccex) {
        throw new CompilerException("Class '" + className + "' is not an optimizer post-pass.", ccex);
    }
}
Example #13
Source Project: flink Author: apache File: JavaSerializer.java License: Apache License 2.0
@SuppressWarnings({"unchecked", "rawtypes"})
@Override
public T read(Kryo kryo, Input input, Class aClass) {
    try {
        ObjectMap graphContext = kryo.getGraphContext();
        ObjectInputStream objectStream = (ObjectInputStream) graphContext.get(this);
        if (objectStream == null) {
            // make sure we use Kryo's classloader
            objectStream = new InstantiationUtil.ClassLoaderObjectInputStream(input, kryo.getClassLoader());
            graphContext.put(this, objectStream);
        }
        return (T) objectStream.readObject();
    } catch (Exception ex) {
        throw new KryoException("Error during Java deserialization.", ex);
    }
}
Example #14
Source Project: flink Author: flink-tpc-ds File: FileCacheReadsFromBlobTest.java License: Apache License 2.0
@Test
public void testFileDownloadedFromBlob() throws Exception {
    JobID jobID = new JobID();
    ExecutionAttemptID attemptID = new ExecutionAttemptID();

    final String fileName = "test_file";
    // copy / create the file
    final DistributedCache.DistributedCacheEntry entry = new DistributedCache.DistributedCacheEntry(
        fileName,
        false,
        InstantiationUtil.serializeObject(permanentBlobKey));
    Future<Path> copyResult = fileCache.createTmpFile(fileName, entry, jobID, attemptID);

    final Path dstPath = copyResult.get();
    final String actualContent = Files.toString(new File(dstPath.toUri()), StandardCharsets.UTF_8);
    assertTrue(dstPath.getFileSystem().exists(dstPath));
    assertEquals(testFileContent, actualContent);
}
Example #15
Source Project: flink Author: apache File: StreamConfig.java License: Apache License 2.0
public Map<Integer, StreamConfig> getTransitiveChainedTaskConfigs(ClassLoader cl) {
    try {
        Map<Integer, StreamConfig> confs = InstantiationUtil.readObjectFromConfig(this.config, CHAINED_TASK_CONFIG, cl);
        return confs == null ? new HashMap<Integer, StreamConfig>() : confs;
    } catch (Exception e) {
        throw new StreamTaskException("Could not instantiate configuration.", e);
    }
}
Example #16
Source Project: flink Author: apache File: RuntimeSerializerFactory.java License: Apache License 2.0
@Override
public void writeParametersToConfig(Configuration config) {
    try {
        InstantiationUtil.writeObjectToConfig(clazz, config, CONFIG_KEY_CLASS);
        InstantiationUtil.writeObjectToConfig(serializer, config, CONFIG_KEY_SER);
    } catch (Exception e) {
        throw new RuntimeException("Could not serialize serializer into the configuration.", e);
    }
}
Example #17
Source Project: flink Author: flink-tpc-ds File: FlinkKinesisConsumer.java License: Apache License 2.0
/**
 * Creates a new Flink Kinesis Consumer.
 *
 * <p>The AWS credentials to be used, AWS region of the Kinesis streams, initial position to start streaming
 * from are configured with a {@link Properties} instance.</p>
 *
 * @param streams
 *            The AWS Kinesis streams to read from.
 * @param deserializer
 *            The keyed deserializer used to convert raw bytes of Kinesis records to Java objects.
 * @param configProps
 *            The properties used to configure AWS credentials, AWS region, and initial starting position.
 */
public FlinkKinesisConsumer(List<String> streams, KinesisDeserializationSchema<T> deserializer, Properties configProps) {
    checkNotNull(streams, "streams can not be null");
    checkArgument(streams.size() != 0, "must be consuming at least 1 stream");
    checkArgument(!streams.contains(""), "stream names cannot be empty Strings");
    this.streams = streams;

    this.configProps = checkNotNull(configProps, "configProps can not be null");

    // check the configuration properties for any conflicting settings
    KinesisConfigUtil.validateConsumerConfiguration(this.configProps);

    checkNotNull(deserializer, "deserializer can not be null");
    checkArgument(
        InstantiationUtil.isSerializable(deserializer),
        "The provided deserialization schema is not serializable: " + deserializer.getClass().getName() + ". " +
            "Please check that it does not contain references to non-serializable instances.");
    this.deserializer = deserializer;

    if (LOG.isInfoEnabled()) {
        StringBuilder sb = new StringBuilder();
        for (String stream : streams) {
            sb.append(stream).append(", ");
        }
        LOG.info("Flink Kinesis Consumer is going to read the following streams: {}", sb.toString());
    }
}
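For orientation, here is a minimal construction sketch for the consumer above; it is an illustration under stated assumptions, not code from the project. The helper class KinesisConsumerSketch, the stream name, region, and initial position are all placeholders, and it assumes the flink-connector-kinesis dependency together with Flink's KinesisDeserializationSchemaWrapper and SimpleStringSchema.

import java.util.Collections;
import java.util.Properties;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kinesis.FlinkKinesisConsumer;
import org.apache.flink.streaming.connectors.kinesis.config.ConsumerConfigConstants;
import org.apache.flink.streaming.connectors.kinesis.serialization.KinesisDeserializationSchemaWrapper;

// hypothetical helper, not part of the original example
public class KinesisConsumerSketch {

    public static FlinkKinesisConsumer<String> createConsumer() {
        Properties config = new Properties();
        // placeholder region and initial position
        config.setProperty(ConsumerConfigConstants.AWS_REGION, "us-east-1");
        config.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, "LATEST");

        // wrap a plain DeserializationSchema into the keyed Kinesis variant
        return new FlinkKinesisConsumer<>(
                Collections.singletonList("my-stream"),
                new KinesisDeserializationSchemaWrapper<>(new SimpleStringSchema()),
                config);
    }
}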
Example #18
Source Project: flink Author: apache File: TaskConfig.java License: Apache License 2.0
private void setTypeSerializerFactory(TypeSerializerFactory<?> factory, String classNameKey, String parametersPrefix) {
    // sanity check the factory type
    InstantiationUtil.checkForInstantiation(factory.getClass());

    // store the type
    this.config.setString(classNameKey, factory.getClass().getName());
    // store the parameters
    final DelegatingConfiguration parameters = new DelegatingConfiguration(this.config, parametersPrefix);
    factory.writeParametersToConfig(parameters);
}
Example #19
Source Project: flink Author: flink-tpc-ds File: CollectorOutput.java License: Apache License 2.0
@Override
public void collect(StreamRecord<T> record) {
    try {
        ClassLoader cl = record.getClass().getClassLoader();
        T copied = InstantiationUtil.deserializeObject(InstantiationUtil.serializeObject(record.getValue()), cl);
        list.add(record.copy(copied));
    } catch (IOException | ClassNotFoundException ex) {
        throw new RuntimeException("Unable to deserialize record: " + record, ex);
    }
}
Example #20
Source Project: Flink-CEPplus Author: ljygz File: FileArchivedExecutionGraphStore.java License: Apache License 2.0
private void storeArchivedExecutionGraph(ArchivedExecutionGraph archivedExecutionGraph) throws IOException {
    final File archivedExecutionGraphFile = getExecutionGraphFile(archivedExecutionGraph.getJobID());

    try (FileOutputStream fileOutputStream = new FileOutputStream(archivedExecutionGraphFile)) {
        InstantiationUtil.serializeObject(fileOutputStream, archivedExecutionGraph);
    }
}
Example #21
Source Project: flink Author: flink-tpc-ds File: HiveAggSqlFunction.java License: Apache License 2.0
@Override
public AggregateFunction makeFunction(Object[] constantArguments, LogicalType[] argTypes) {
    AggregateFunction clone;
    try {
        clone = InstantiationUtil.clone(aggregateFunction);
    } catch (IOException | ClassNotFoundException e) {
        throw new RuntimeException(e);
    }
    return (AggregateFunction) invokeSetArgs(clone, constantArguments, argTypes);
}
Example #22
Source Project: flink Author: flink-tpc-ds File: StreamConfig.java License: Apache License 2.0
public void setNonChainedOutputs(List<StreamEdge> outputvertexIDs) {
    try {
        InstantiationUtil.writeObjectToConfig(outputvertexIDs, this.config, NONCHAINED_OUTPUTS);
    } catch (IOException e) {
        throw new StreamTaskException("Cannot serialize non chained outputs.", e);
    }
}
Example #23
Source Project: flink Author: apache File: TimestampedHiveInputSplit.java License: Apache License 2.0
@Override
public TimestampedHiveInputSplit copy(TimestampedHiveInputSplit from) {
    try {
        return InstantiationUtil.clone(from, Thread.currentThread().getContextClassLoader());
    } catch (IOException | ClassNotFoundException e) {
        throw new FlinkRuntimeException("Could not copy element via serialization: " + from, e);
    }
}
Example #24
Source Project: flink Author: flink-tpc-ds File: StreamConfig.java License: Apache License 2.0
public <T> List<OutputSelector<T>> getOutputSelectors(ClassLoader userCodeClassloader) {
    try {
        List<OutputSelector<T>> selectors =
            InstantiationUtil.readObjectFromConfig(this.config, OUTPUT_SELECTOR_WRAPPER, userCodeClassloader);
        return selectors == null ? Collections.<OutputSelector<T>>emptyList() : selectors;
    } catch (Exception e) {
        throw new StreamTaskException("Could not read output selectors", e);
    }
}
Example #25
Source Project: flink Author: flink-tpc-ds File: GenericTypeComparator.java License: Apache License 2.0
@Override
public int getNormalizeKeyLen() {
    if (this.reference == null) {
        this.reference = InstantiationUtil.instantiate(this.type);
    }

    NormalizableKey<?> key = (NormalizableKey<?>) this.reference;
    return key.getMaxNormalizedKeyLen();
}
Example #26
Source Project: flink Author: apache File: ElasticsearchResource.java License: Apache License 2.0
@Override
protected void before() throws Throwable {
    LOG.info("-------------------------------------------------------------------------");
    LOG.info(" Starting embedded Elasticsearch node ");
    LOG.info("-------------------------------------------------------------------------");

    // dynamically load version-specific implementation of the Elasticsearch embedded node environment
    Class<?> clazz = Class.forName(
        "org.apache.flink.streaming.connectors.elasticsearch.EmbeddedElasticsearchNodeEnvironmentImpl");
    embeddedNodeEnv = (EmbeddedElasticsearchNodeEnvironment) InstantiationUtil.instantiate(clazz);

    tempFolder.create();
    embeddedNodeEnv.start(tempFolder.newFolder(), clusterName);
}
Example #27
Source Project: flink Author: apache File: JobGraph.java License: Apache License 2.0
public void setUserArtifactBlobKey(String entryName, PermanentBlobKey blobKey) throws IOException {
    byte[] serializedBlobKey;
    serializedBlobKey = InstantiationUtil.serializeObject(blobKey);

    userArtifacts.computeIfPresent(entryName, (key, originalEntry) -> new DistributedCache.DistributedCacheEntry(
        originalEntry.filePath,
        originalEntry.isExecutable,
        serializedBlobKey,
        originalEntry.isZipped
    ));
}
Example #28
Source Project: flink Author: apache File: ValueComparator.java License: Apache License 2.0
@Override
public int getNormalizeKeyLen() {
    if (reference == null) {
        reference = InstantiationUtil.instantiate(type, Value.class);
    }

    NormalizableKey<?> key = (NormalizableKey<?>) reference;
    return key.getMaxNormalizedKeyLen();
}
Example #29
Source Project: flink Author: apache File: EncodingUtils.java License: Apache License 2.0
public static String encodeObjectToString(Serializable obj) {
    try {
        final byte[] bytes = InstantiationUtil.serializeObject(obj);
        return new String(BASE64_ENCODER.encode(bytes), UTF_8);
    } catch (Exception e) {
        throw new ValidationException(
            "Unable to serialize object '" + obj.toString() + "' of class '" + obj.getClass().getName() + "'.");
    }
}