Java Code Examples for org.apache.iceberg.FileFormat#valueOf()

The following examples show how to use org.apache.iceberg.FileFormat#valueOf() . These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: iceberg | File: RowDataRewriter.java | License: Apache License 2.0
/**
 * Captures the table state needed to rewrite row data in Spark tasks.
 *
 * <p>The broadcast {@code io} and {@code encryptionManager} are shipped to
 * executors; everything else is derived from the table at construction time.
 */
public RowDataRewriter(Table table, PartitionSpec spec, boolean caseSensitive,
                       Broadcast<FileIO> io, Broadcast<EncryptionManager> encryptionManager) {
  this.spec = spec;
  this.caseSensitive = caseSensitive;
  this.io = io;
  this.encryptionManager = encryptionManager;

  // snapshot table-derived state; the table schema is authoritative for field ids
  this.schema = table.schema();
  this.locations = table.locationProvider();
  this.properties = table.properties();
  this.nameMapping = table.properties().get(DEFAULT_NAME_MAPPING);

  // resolve the configured default write format (e.g. "parquet") into the enum;
  // uppercase with an explicit locale so parsing is locale-independent
  String formatName = table.properties().getOrDefault(
      TableProperties.DEFAULT_FILE_FORMAT, TableProperties.DEFAULT_FILE_FORMAT_DEFAULT);
  this.format = FileFormat.valueOf(formatName.toUpperCase(Locale.ENGLISH));
}
 
Example 2
Source Project: presto | File: IcebergConfig.java | License: Apache License 2.0
/**
 * Returns the configured file format mapped onto Iceberg's {@link FileFormat} enum.
 */
public FileFormat getFileFormat()
{
    // the two enums share constant names, so translate by name
    String constantName = fileFormat.name();
    return FileFormat.valueOf(constantName);
}
 
Example 3
Source Project: presto | File: IcebergUtil.java | License: Apache License 2.0
/**
 * Reads the table's default write-format property and maps it to {@link FileFormat}.
 */
public static FileFormat getFileFormat(Table table)
{
    String formatName = table.properties()
            .getOrDefault(DEFAULT_FILE_FORMAT, DEFAULT_FILE_FORMAT_DEFAULT);
    // uppercase with a fixed locale so the enum lookup is locale-independent
    return FileFormat.valueOf(formatName.toUpperCase(Locale.ENGLISH));
}
 
Example 4
Source Project: iceberg | File: Writer.java | License: Apache License 2.0
/**
 * Determines the write file format: an explicit "write-format" option wins,
 * otherwise the table's default write-format property is used.
 */
private FileFormat getFileFormat(Map<String, String> tableProperties, DataSourceOptions options) {
  String formatName = options.get("write-format")
      .orElseGet(() ->
          tableProperties.getOrDefault(DEFAULT_FILE_FORMAT, DEFAULT_FILE_FORMAT_DEFAULT));
  return FileFormat.valueOf(formatName.toUpperCase(Locale.ENGLISH));
}
 
Example 5
Source Project: iceberg | File: TestFilteredScan.java | License: Apache License 2.0
@Before
public void writeUnpartitionedTable() throws IOException {
  // Set up an unpartitioned table containing a single data file in the
  // parameterized format, so every test starts from identical table state.
  this.parent = temp.newFolder("TestFilteredScan");
  this.unpartitioned = new File(parent, "unpartitioned");
  File dataFolder = new File(unpartitioned, "data");
  Assert.assertTrue("Mkdir should succeed", dataFolder.mkdirs());

  Table table = TABLES.create(SCHEMA, PartitionSpec.unpartitioned(), unpartitioned.toString());
  Schema tableSchema = table.schema(); // use the table schema because ids are reassigned

  FileFormat fileFormat = FileFormat.valueOf(format.toUpperCase(Locale.ENGLISH));

  File testFile = new File(dataFolder, fileFormat.addExtension(UUID.randomUUID().toString()));

  // create records using the table's schema
  this.records = testRecords(tableSchema);

  switch (fileFormat) {
    case AVRO:
      try (FileAppender<Record> writer = Avro.write(localOutput(testFile))
          .createWriterFunc(DataWriter::create)
          .schema(tableSchema)
          .build()) {
        writer.addAll(records);
      }
      break;

    case PARQUET:
      try (FileAppender<Record> writer = Parquet.write(localOutput(testFile))
          .createWriterFunc(GenericParquetWriter::buildWriter)
          .schema(tableSchema)
          .build()) {
        writer.addAll(records);
      }
      break;

    case ORC:
      try (FileAppender<Record> writer = ORC.write(localOutput(testFile))
          .createWriterFunc(GenericOrcWriter::buildWriter)
          .schema(tableSchema)
          .build()) {
        writer.addAll(records);
      }
      break;

    default:
      // fail fast: without this, an unhandled format would silently commit an
      // empty data file and produce confusing downstream test failures
      throw new UnsupportedOperationException("Cannot write test data in format: " + fileFormat);
  }

  DataFile file = DataFiles.builder(PartitionSpec.unpartitioned())
      .withRecordCount(records.size())
      .withFileSizeInBytes(testFile.length())
      .withPath(testFile.toString())
      .build();

  table.newAppend().appendFile(file).commit();
}
 
Example 6
Source Project: iceberg | File: TestLocalScan.java | License: Apache License 2.0
/** Parameterized-test constructor: parses the case-insensitive format name into the enum. */
public TestLocalScan(String format) {
  String normalized = format.toUpperCase(Locale.ENGLISH);
  this.format = FileFormat.valueOf(normalized);
}
 
Example 7
/** Parameterized-test constructor: maps the case-insensitive format name onto {@link FileFormat}. */
public TestMetricsRowGroupFilter(String format) {
  String normalized = format.toUpperCase(Locale.ENGLISH);
  this.format = FileFormat.valueOf(normalized);
}
 
Example 8
/**
 * Parameterized-test constructor.
 *
 * @param format     case-insensitive file format name (e.g. "parquet")
 * @param column     column under test
 * @param readValue  a value expected to match the row group
 * @param skipValue  a value expected to be skipped
 */
public TestMetricsRowGroupFilterTypes(String format, String column, Object readValue, Object skipValue) {
  this.column = column;
  this.readValue = readValue;
  this.skipValue = skipValue;
  this.format = FileFormat.valueOf(format.toUpperCase(Locale.ENGLISH));
}
 
Example 9
Source Project: iceberg | File: TestIcebergInputFormat.java | License: Apache License 2.0
/** Parameterized-test constructor: resolves the case-insensitive format name to the enum. */
public TestIcebergInputFormat(String format) {
  String normalized = format.toUpperCase(Locale.ENGLISH);
  this.format = FileFormat.valueOf(normalized);
}
 
Example 10
Source Project: iceberg | File: SparkDataFile.java | License: Apache License 2.0
@Override
public FileFormat format() {
  // the format is stored as a string column in the wrapped Spark row;
  // uppercase with a fixed locale before the locale-sensitive enum lookup
  return FileFormat.valueOf(wrapped.getString(fileFormatPosition).toUpperCase(Locale.ROOT));
}
 
Example 11
Source Project: iceberg | File: TestSparkReadProjection.java | License: Apache License 2.0
public TestSparkReadProjection(String format, boolean vectorized) {
  super(format);
  this.format = FileFormat.valueOf(format.toUpperCase(Locale.ROOT));
  this.vectorized = vectorized;
}
 
Example 12
Source Project: iceberg | File: TestSparkDataWrite.java | License: Apache License 2.0
/** Parameterized-test constructor: translates the case-insensitive format name into the enum. */
public TestSparkDataWrite(String format) {
  String normalized = format.toUpperCase(Locale.ENGLISH);
  this.format = FileFormat.valueOf(normalized);
}
 
Example 13
Source Project: iceberg | File: SparkBatchWrite.java | License: Apache License 2.0
/**
 * Determines the write file format: an explicit "write-format" option takes
 * precedence over the table's default write-format property.
 */
protected FileFormat getFileFormat(Map<String, String> tableProperties, Map<String, String> options) {
  String formatName = options.get("write-format");
  if (formatName == null) {
    formatName = tableProperties.getOrDefault(DEFAULT_FILE_FORMAT, DEFAULT_FILE_FORMAT_DEFAULT);
  }
  return FileFormat.valueOf(formatName.toUpperCase(Locale.ENGLISH));
}
 
Example 14
Source Project: iceberg | File: TestFilteredScan.java | License: Apache License 2.0
@Before
public void writeUnpartitionedTable() throws IOException {
  // Set up an unpartitioned table containing a single data file in the
  // parameterized format, so every test starts from identical table state.
  this.parent = temp.newFolder("TestFilteredScan");
  this.unpartitioned = new File(parent, "unpartitioned");
  File dataFolder = new File(unpartitioned, "data");
  Assert.assertTrue("Mkdir should succeed", dataFolder.mkdirs());

  Table table = TABLES.create(SCHEMA, PartitionSpec.unpartitioned(), unpartitioned.toString());
  Schema tableSchema = table.schema(); // use the table schema because ids are reassigned

  FileFormat fileFormat = FileFormat.valueOf(format.toUpperCase(Locale.ENGLISH));

  File testFile = new File(dataFolder, fileFormat.addExtension(UUID.randomUUID().toString()));

  // create records using the table's schema
  org.apache.avro.Schema avroSchema = AvroSchemaUtil.convert(tableSchema, "test");
  this.records = testRecords(avroSchema);

  switch (fileFormat) {
    case AVRO:
      try (FileAppender<Record> writer = Avro.write(localOutput(testFile))
          .schema(tableSchema)
          .build()) {
        writer.addAll(records);
      }
      break;

    case PARQUET:
      try (FileAppender<Record> writer = Parquet.write(localOutput(testFile))
          .schema(tableSchema)
          .build()) {
        writer.addAll(records);
      }
      break;

    default:
      // fail fast: only AVRO and PARQUET writers are wired up here; without
      // this, any other format (e.g. ORC) would silently commit an empty data
      // file and fail later in a confusing way
      throw new UnsupportedOperationException("Cannot write test data in format: " + fileFormat);
  }

  DataFile file = DataFiles.builder(PartitionSpec.unpartitioned())
      .withRecordCount(records.size())
      .withFileSizeInBytes(testFile.length())
      .withPath(testFile.toString())
      .build();

  table.newAppend().appendFile(file).commit();
}