Java Code Examples for org.apache.hadoop.conf.Configuration#readFields()
The following examples show how to use org.apache.hadoop.conf.Configuration#readFields().
Each example is taken from an open-source project; the source file, project, and license are listed above each snippet.
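Before the project examples, here is a minimal, self-contained sketch of the write/readFields round trip they all build on. The class name and property key below are illustrative, not taken from any of the projects; the only assumption is that Configuration implements Hadoop's Writable contract (write(DataOutput) / readFields(DataInput)), which it does.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;

public class ConfigurationRoundTrip {

    public static void main(String[] args) throws IOException {
        // Write a configuration to a byte array via Writable#write(DataOutput).
        Configuration original = new Configuration(false); // false: do not load default resources
        original.set("example.key", "example.value");      // illustrative key/value

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // Read it back via Configuration#readFields(DataInput).
        Configuration restored = new Configuration(false);
        restored.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(restored.get("example.key"));   // prints "example.value"
    }
}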
Example 1
Source File: HCatInputFormatBase.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    this.fieldNames = new String[in.readInt()];
    for (int i = 0; i < this.fieldNames.length; i++) {
        this.fieldNames[i] = in.readUTF();
    }

    Configuration configuration = new Configuration();
    configuration.readFields(in);

    if (this.configuration == null) {
        this.configuration = configuration;
    }

    this.hCatInputFormat = new org.apache.hive.hcatalog.mapreduce.HCatInputFormat();
    this.outputSchema = (HCatSchema) HCatUtil.deserialize(this.configuration.get("mapreduce.lib.hcat.output.schema"));
}
Example 2
Source File: HCatInputFormatBase.java From flink with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    this.fieldNames = new String[in.readInt()];
    for (int i = 0; i < this.fieldNames.length; i++) {
        this.fieldNames[i] = in.readUTF();
    }

    Configuration configuration = new Configuration();
    configuration.readFields(in);

    if (this.configuration == null) {
        this.configuration = configuration;
    }

    this.hCatInputFormat = new org.apache.hive.hcatalog.mapreduce.HCatInputFormat();
    this.outputSchema = (HCatSchema) HCatUtil.deserialize(this.configuration.get("mapreduce.lib.hcat.output.schema"));
}
Example 3
Source File: HCatInputFormatBase.java From flink with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    this.fieldNames = new String[in.readInt()];
    for (int i = 0; i < this.fieldNames.length; i++) {
        this.fieldNames[i] = in.readUTF();
    }

    Configuration configuration = new Configuration();
    configuration.readFields(in);

    if (this.configuration == null) {
        this.configuration = configuration;
    }

    this.hCatInputFormat = new org.apache.hive.hcatalog.mapreduce.HCatInputFormat();
    this.outputSchema = (HCatSchema) HCatUtil.deserialize(this.configuration.get("mapreduce.lib.hcat.output.schema"));
}
Example 4
Source File: OrcInputFormat.java From flink with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    batchSize = in.readInt();
    Configuration configuration = new Configuration();
    configuration.readFields(in);

    if (this.conf == null) {
        this.conf = configuration;
    }

    this.schema = TypeDescription.fromString(in.readUTF());

    this.selectedFields = new int[in.readInt()];
    for (int i = 0; i < selectedFields.length; i++) {
        this.selectedFields[i] = in.readInt();
    }

    this.conjunctPredicates = new ArrayList<>();
    int numPreds = in.readInt();
    for (int i = 0; i < numPreds; i++) {
        conjunctPredicates.add((Predicate) in.readObject());
    }
}
Example 5
Source File: SerializableHadoopConfiguration.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
private void readObject(ObjectInputStream in) throws IOException {
    final Configuration config = new Configuration();
    config.readFields(in);

    if (this.hadoopConfig == null) {
        this.hadoopConfig = config;
    }
}
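Example 5 only shows the deserialization side of the wrapper. A matching write side could look like the sketch below; the field name hadoopConfig is taken from the example above, but the rest is an assumption rather than the project's actual code. It relies on ObjectOutputStream implementing DataOutput, just as the read side relies on ObjectInputStream implementing DataInput.

// Hypothetical writeObject counterpart (sketch, not necessarily the project's code):
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    // ObjectOutputStream implements DataOutput, so Writable#write can write straight into it.
    this.hadoopConfig.write(out);
}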
Example 6
Source File: SerializableHadoopConfiguration.java From flink with Apache License 2.0 | 5 votes |
private void readObject(ObjectInputStream in) throws IOException {
    final Configuration config = new Configuration();
    config.readFields(in);

    if (this.hadoopConfig == null) {
        this.hadoopConfig = config;
    }
}
Example 7
Source File: LargeBinaryDocument.java From marklogic-contentpump with Apache License 2.0 | 5 votes |
@Override
public void readFields(DataInput in) throws IOException {
    super.readFields(in);
    path = new Path(Text.readString(in));
    offset = in.readLong();
    size = in.readLong();
    binaryOrigLen = in.readLong();
    conf = new Configuration();
    conf.readFields(in);
}
Example 8
Source File: SerializableConfiguration.java From beam with Apache License 2.0 | 5 votes |
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    String className = in.readUTF();
    try {
        conf = (Configuration) Class.forName(className).getDeclaredConstructor().newInstance();
        conf.readFields(in);
    } catch (InstantiationException
        | IllegalAccessException
        | NoSuchMethodException
        | InvocationTargetException e) {
        throw new IOException("Unable to create configuration: " + e);
    }
}
Example 9
Source File: HadoopFormatIO.java From beam with Apache License 2.0 | 5 votes |
@Override
public Configuration decode(InputStream inStream) throws IOException {
    DataInputStream dataInputStream = new DataInputStream(inStream);
    Configuration config = new Configuration(false);
    config.readFields(dataInputStream);
    return config;
}
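Example 9 shows only the decode half of the coder. A possible encode half, sketched here against Beam's Coder contract rather than copied from HadoopFormatIO, would simply call the Writable write method in the other direction:

// Hypothetical encode counterpart (sketch):
@Override
public void encode(Configuration value, OutputStream outStream) throws IOException {
    DataOutputStream dataOutputStream = new DataOutputStream(outStream);
    value.write(dataOutputStream);  // Writable#write(DataOutput)
    dataOutputStream.flush();
}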
Example 10
Source File: SerializableHadoopConfiguration.java From flink with Apache License 2.0 | 5 votes |
private void readObject(ObjectInputStream in) throws IOException {
    final Configuration config = new Configuration();
    config.readFields(in);

    if (this.hadoopConfig == null) {
        this.hadoopConfig = config;
    }
}
Example 11
Source File: TaskSpec.java From tez with Apache License 2.0 | 5 votes |
@Override
public void readFields(DataInput in) throws IOException {
    taskAttemptId = TezTaskAttemptID.readTezTaskAttemptID(in);
    dagName = StringInterner.weakIntern(in.readUTF());
    vertexName = StringInterner.weakIntern(in.readUTF());
    vertexParallelism = in.readInt();
    // TODO TEZ-305 convert this to PB
    processorDescriptor = new ProcessorDescriptor();
    processorDescriptor.readFields(in);
    int numInputSpecs = in.readInt();
    inputSpecList = new ArrayList<InputSpec>(numInputSpecs);
    for (int i = 0; i < numInputSpecs; i++) {
        InputSpec inputSpec = new InputSpec();
        inputSpec.readFields(in);
        inputSpecList.add(inputSpec);
    }
    int numOutputSpecs = in.readInt();
    outputSpecList = new ArrayList<OutputSpec>(numOutputSpecs);
    for (int i = 0; i < numOutputSpecs; i++) {
        OutputSpec outputSpec = new OutputSpec();
        outputSpec.readFields(in);
        outputSpecList.add(outputSpec);
    }
    boolean hasGroupInputs = in.readBoolean();
    if (hasGroupInputs) {
        int numGroups = in.readInt();
        groupInputSpecList = Lists.newArrayListWithCapacity(numGroups);
        for (int i = 0; i < numGroups; ++i) {
            GroupInputSpec group = new GroupInputSpec();
            group.readFields(in);
            groupInputSpecList.add(group);
        }
    }
    boolean hasVertexConf = in.readBoolean();
    if (hasVertexConf) {
        taskConf = new Configuration(false);
        taskConf.readFields(in);
    }
}
Example 12
Source File: SerializableConfiguration.java From iceberg with Apache License 2.0 | 4 votes |
private void readObject(ObjectInputStream in) throws ClassNotFoundException, IOException {
    in.defaultReadObject();
    hadoopConf = new Configuration(false);
    hadoopConf.readFields(in);
}
Example 13
Source File: SerializableConfiguration.java From iceberg with Apache License 2.0 | 4 votes |
private void readObject(ObjectInputStream in) throws ClassNotFoundException, IOException {
    in.defaultReadObject();
    hadoopConf = new Configuration(false);
    hadoopConf.readFields(in);
}
Example 14
Source File: SerializableConfiguration.java From hudi with Apache License 2.0 | 4 votes |
private void readObject(ObjectInputStream in) throws IOException {
    configuration = new Configuration(false);
    configuration.readFields(in);
}
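A closing note on the constructors used in these examples: new Configuration(false) skips loading the default resources (core-default.xml and core-site.xml), so after readFields(in) the instance holds only the entries that were serialized, while new Configuration() layers the deserialized entries on top of whatever defaults are on the local classpath. Which variant fits depends on whether the deserializing side should also inherit its own cluster defaults.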