Java Code Examples for com.google.cloud.bigquery.Field#getName()

The following examples show how to use com.google.cloud.bigquery.Field#getName(). You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: SchemaConverters.java    From spark-bigquery-connector with Apache License 2.0 6 votes vote down vote up
/**
 * Maps a single BigQuery {@link Field} onto the corresponding Spark SQL {@link StructField}.
 * <p>
 * REQUIRED fields become non-nullable, REPEATED fields become arrays (with nullable
 * elements), and the BigQuery column description, when present, is carried over as
 * Spark metadata under the key {@code "description"}.
 * <p>
 * Not guaranteed to be stable across all versions of Spark.
 */

private static StructField convert(Field field) {
    // Nullable unless BigQuery explicitly marks the column REQUIRED.
    boolean nullable = field.getMode() != Field.Mode.REQUIRED;

    DataType dataType = getDataType(field);
    if (field.getMode() == Field.Mode.REPEATED) {
        // Repeated columns surface as arrays whose elements may themselves be null.
        dataType = new ArrayType(dataType, true);
    }

    MetadataBuilder metadata = new MetadataBuilder();
    if (field.getDescription() != null) {
        metadata.putString("description", field.getDescription());
    }

    return new StructField(field.getName(), dataType, nullable, metadata.build());
}
 
Example 2
Source File: Conversions.java    From presto with Apache License 2.0 6 votes vote down vote up
/**
 * Converts a BigQuery {@link Field} into a Presto column handle, recursing into
 * sub-fields so that STRUCT columns produce nested column handles.
 */
static BigQueryColumnHandle toColumnHandle(Field field)
{
    FieldList subFields = field.getSubFields();
    List<BigQueryColumnHandle> subColumns;
    if (subFields == null) {
        // Scalar column: no nested children.
        subColumns = Collections.emptyList();
    }
    else {
        subColumns = subFields.stream()
                .map(Conversions::toColumnHandle)
                .collect(Collectors.toList());
    }
    return new BigQueryColumnHandle(
            field.getName(),
            BigQueryType.valueOf(field.getType().name()),
            getMode(field),
            subColumns,
            field.getDescription(),
            false);
}
 
Example 3
Source File: BeamBQInputDialog.java    From kettle-beam with Apache License 2.0 5 votes vote down vote up
/**
 * Reads the schema of the configured BigQuery table and pushes its columns into the
 * dialog's fields grid.
 * <p>
 * Dataset and table ids are taken from the dialog state (with Kettle variable
 * substitution applied) and resolved against the default BigQuery credentials from
 * the environment. Any failure is reported to the user via an {@link ErrorDialog}.
 */
public void getFields() {
  try {

    BeamBQInputMeta inputMeta = new BeamBQInputMeta();
    getInfo(inputMeta);

    // Uses application-default credentials/project from the environment.
    BigQuery client = BigQueryOptions.getDefaultInstance().getService();

    if ( StringUtils.isNotEmpty( inputMeta.getDatasetId() ) &&
         StringUtils.isNotEmpty( inputMeta.getTableId() )) {

      Table bqTable = client.getTable(
        transMeta.environmentSubstitute( inputMeta.getDatasetId() ),
        transMeta.environmentSubstitute( inputMeta.getTableId() )
      );

      TableDefinition tableDefinition = bqTable.getDefinition();
      FieldList bqFields = tableDefinition.getSchema().getFields();

      RowMetaInterface fieldsRowMeta = new RowMeta();
      for ( int index = 0; index < bqFields.size(); index++ ) {
        Field bqField = bqFields.get( index );

        // Map the BigQuery type name onto the matching Kettle value type id.
        int kettleType = BQSchemaAndRecordToKettleFn.AvroType.valueOf( bqField.getType().name() ).getKettleType();
        fieldsRowMeta.addValueMeta( ValueMetaFactory.createValueMeta( bqField.getName(), kettleType ) );
      }

      BaseStepDialog.getFieldsFromPrevious( fieldsRowMeta, wFields, 1, new int[] { 1 }, new int[] { 3 }, -1, -1, true, null );
    }

  } catch ( Exception e ) {
    new ErrorDialog( shell, "Error", "Error getting BQ fields", e );
  }
}
 
Example 4
Source File: BigQueryAvroRegistry.java    From components with Apache License 2.0 5 votes vote down vote up
/**
 * Infers an Avro record schema from a BigQuery table {@link Schema}.
 * <p>
 * Each BigQuery field becomes an Avro field (with no default value) whose type is
 * produced by {@code inferSchemaField}. A table with no fields yields an empty
 * record named {@code "EmptyRecord"}.
 *
 * @param schema the BigQuery table schema to convert
 * @return the equivalent Avro record schema, named {@code "BigQuerySchema"}
 */
private org.apache.avro.Schema inferBigQuerySchema(Schema schema) {
    List<Field> bqFields = schema.getFields();
    // An assembler with zero fields would still need a record name; short-circuit
    // the empty case explicitly. isEmpty() is the idiomatic emptiness check.
    if (bqFields.isEmpty()) {
        return SchemaBuilder.builder().record("EmptyRecord").fields().endRecord();
    }

    SchemaBuilder.FieldAssembler<org.apache.avro.Schema> fieldAssembler =
            SchemaBuilder.record("BigQuerySchema").fields();
    for (Field bqField : bqFields) {
        String name = bqField.getName();
        org.apache.avro.Schema fieldSchema = inferSchemaField(bqField);
        fieldAssembler = fieldAssembler.name(name).type(fieldSchema).noDefault();
    }
    return fieldAssembler.endRecord();
}
 
Example 5
Source File: BigQueryAvroRegistry.java    From components with Apache License 2.0 4 votes vote down vote up
/**
 * Maps a single BigQuery field (ignoring its mode) to an Avro schema.
 * <p>
 * Handles all BigQuery types except Record/Struct arrays, no matter legacy or not,
 * as {@link LegacySQLTypeName} is a wrapper for {@link StandardSQLTypeName}.
 * STRUCT fields recurse through {@code inferSchemaField}; date/time types map to
 * Avro strings tagged with the Talend DB-type property rather than Avro logical
 * types (logical-type mappings such as {@code LogicalTypes.date()} were considered
 * but are not used here).
 *
 * @param field the BigQuery field to convert
 * @return the Avro schema for the field's type
 * @throws RuntimeException if the field's standard type has no mapping here
 *         (e.g. types not listed in the switch below)
 */
private org.apache.avro.Schema inferSchemaFieldWithoutMode(Field field) {
    LegacySQLTypeName sqlType = field.getType();
    switch (sqlType.getStandardType()) {
    case STRUCT: {
        // Struct type
        // https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct-type
        String name = field.getName();
        SchemaBuilder.FieldAssembler<org.apache.avro.Schema> itemFieldAssembler =
                SchemaBuilder.record(name).fields();
        for (Field itemField : field.getSubFields()) {
            itemFieldAssembler.name(itemField.getName()).type(inferSchemaField(itemField)).noDefault();
        }
        return itemFieldAssembler.endRecord();
    }
    case BYTES:
        return AvroUtils._bytes();
    case INT64:
        return AvroUtils._long();
    case FLOAT64:
        return AvroUtils._double();
    case BOOL:
        return AvroUtils._boolean();
    case DATETIME: {
        org.apache.avro.Schema schemaDT = AvroUtils._string();
        schemaDT.addProp(TALEND_COLUMN_DB_TYPE, BigQueryType.DATETIME.toString());
        return schemaDT;
    }
    case DATE: {
        org.apache.avro.Schema schemaD = AvroUtils._string();
        schemaD.addProp(TALEND_COLUMN_DB_TYPE, BigQueryType.DATE.toString());
        return schemaD;
    }
    case TIME: {
        org.apache.avro.Schema schemaT = AvroUtils._string();
        schemaT.addProp(TALEND_COLUMN_DB_TYPE, BigQueryType.TIME.toString());
        return schemaT;
    }
    case TIMESTAMP: {
        org.apache.avro.Schema schemaTS = AvroUtils._string();
        schemaTS.addProp(TALEND_COLUMN_DB_TYPE, BigQueryType.TIMESTAMP.toString());
        return schemaTS;
    }
    case STRING:
        return AvroUtils._string();
    default:
        // Reachable for standard types not handled above (e.g. NUMERIC, GEOGRAPHY).
        throw new RuntimeException("The BigQuery data type " + sqlType + " is not handled.");
    }
}