Java Code Examples for org.apache.hadoop.conf.Configuration#readFields()

The following examples show how to use org.apache.hadoop.conf.Configuration#readFields(). These examples are extracted from open source projects; the source project, file, and license are listed above each example where known.
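Configuration implements Hadoop's Writable interface, so readFields(DataInput) repopulates a Configuration instance from a stream that was previously written with write(DataOutput). The sketch below shows that round trip in isolation; it is hypothetical illustration code (class name and key are made up), not taken from any of the projects below.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;

public class ConfigurationRoundTrip {

	public static void main(String[] args) throws IOException {
		// Build a Configuration without loading default resources and set one entry.
		Configuration original = new Configuration(false);
		original.set("example.key", "example-value");

		// Configuration implements Writable: write(DataOutput) serializes its entries.
		ByteArrayOutputStream bytes = new ByteArrayOutputStream();
		original.write(new DataOutputStream(bytes));

		// readFields(DataInput) repopulates a fresh Configuration from the same bytes.
		Configuration restored = new Configuration(false);
		restored.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

		System.out.println(restored.get("example.key"));   // example-value
	}
}

Most of the examples that follow use the same shape inside readObject(ObjectInputStream) or readFields(DataInput): create an empty Configuration (often new Configuration(false) to avoid loading default resources) and call readFields() on the incoming stream. ObjectInputStream implements DataInput, which is why it can be passed to readFields() directly.
 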
Example 1
Source Project: flink   File: HCatInputFormatBase.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
	this.fieldNames = new String[in.readInt()];
	for (int i = 0; i < this.fieldNames.length; i++) {
		this.fieldNames[i] = in.readUTF();
	}

	Configuration configuration = new Configuration();
	configuration.readFields(in);

	if (this.configuration == null) {
		this.configuration = configuration;
	}

	this.hCatInputFormat = new org.apache.hive.hcatalog.mapreduce.HCatInputFormat();
	this.outputSchema = (HCatSchema) HCatUtil.deserialize(this.configuration.get("mapreduce.lib.hcat.output.schema"));
}
 
Example 2
Source Project: flink   File: OrcInputFormat.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
	batchSize = in.readInt();
	Configuration configuration = new Configuration();
	configuration.readFields(in);

	if (this.conf == null) {
		this.conf = configuration;
	}
	this.schema = TypeDescription.fromString(in.readUTF());

	this.selectedFields = new int[in.readInt()];
	for (int i = 0; i < selectedFields.length; i++) {
		this.selectedFields[i] = in.readInt();
	}

	this.conjunctPredicates = new ArrayList<>();
	int numPreds = in.readInt();
	for (int i = 0; i < numPreds; i++) {
		conjunctPredicates.add((Predicate) in.readObject());
	}
}
 
Example 3
private void readObject(ObjectInputStream in) throws IOException {
	final Configuration config = new Configuration();
	config.readFields(in);

	if (this.hadoopConfig == null) {
		this.hadoopConfig = config;
	}
}
 
Example 4
@Override
public void readFields(DataInput in) throws IOException {
    super.readFields(in);
    path = new Path(Text.readString(in));
    offset = in.readLong();
    size = in.readLong();
    binaryOrigLen = in.readLong();
    conf = new Configuration();
    conf.readFields(in);
}
 
Example 5
Source Project: beam   File: SerializableConfiguration.java    License: Apache License 2.0
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
  String className = in.readUTF();
  try {
    conf = (Configuration) Class.forName(className).getDeclaredConstructor().newInstance();
    conf.readFields(in);
  } catch (InstantiationException
      | IllegalAccessException
      | NoSuchMethodException
      | InvocationTargetException e) {
    throw new IOException("Unable to create configuration: " + e);
  }
}
 
Example 6
Source Project: beam   File: HadoopFormatIO.java    License: Apache License 2.0
@Override
public Configuration decode(InputStream inStream) throws IOException {
  DataInputStream dataInputStream = new DataInputStream(inStream);
  Configuration config = new Configuration(false);
  config.readFields(dataInputStream);

  return config;
}
 
Example 7
Source Project: tez   File: TaskSpec.java    License: Apache License 2.0
@Override
public void readFields(DataInput in) throws IOException {
  taskAttemptId = TezTaskAttemptID.readTezTaskAttemptID(in);
  dagName = StringInterner.weakIntern(in.readUTF());
  vertexName = StringInterner.weakIntern(in.readUTF());
  vertexParallelism = in.readInt();
  // TODO TEZ-305 convert this to PB
  processorDescriptor = new ProcessorDescriptor();
  processorDescriptor.readFields(in);
  int numInputSpecs = in.readInt();
  inputSpecList = new ArrayList<InputSpec>(numInputSpecs);
  for (int i = 0; i < numInputSpecs; i++) {
    InputSpec inputSpec = new InputSpec();
    inputSpec.readFields(in);
    inputSpecList.add(inputSpec);
  }
  int numOutputSpecs = in.readInt();
  outputSpecList = new ArrayList<OutputSpec>(numOutputSpecs);
  for (int i = 0; i < numOutputSpecs; i++) {
    OutputSpec outputSpec = new OutputSpec();
    outputSpec.readFields(in);
    outputSpecList.add(outputSpec);
  }
  boolean hasGroupInputs = in.readBoolean();
  if (hasGroupInputs) {
    int numGroups = in.readInt();
    groupInputSpecList = Lists.newArrayListWithCapacity(numGroups);
    for (int i=0; i<numGroups; ++i) {
      GroupInputSpec group = new GroupInputSpec();
      group.readFields(in);
      groupInputSpecList.add(group);
    }
  }
  boolean hasVertexConf = in.readBoolean();
  if (hasVertexConf) {
    taskConf = new Configuration(false);
    taskConf.readFields(in);
  }
}
 
Example 8
private void readObject(ObjectInputStream in) throws ClassNotFoundException, IOException {
  in.defaultReadObject();
  hadoopConf = new Configuration(false);
  hadoopConf.readFields(in);
}
 
Example 9
Source Project: hudi   File: SerializableConfiguration.java    License: Apache License 2.0
private void readObject(ObjectInputStream in) throws IOException {
  configuration = new Configuration(false);
  configuration.readFields(in);
}