Java Code Examples for com.google.cloud.bigquery.StandardSQLTypeName

The following examples show how to use com.google.cloud.bigquery.StandardSQLTypeName. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example 1
Source Project: datacollector   Source File: BigQueryDelegate.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Converts a single (non-repeated, non-record) BigQuery field value into an SDC record Field.
 *
 * <p>TIMESTAMP values arrive as microsecond epoch values and are converted to milliseconds;
 * other temporal types arrive as strings and are parsed with the field's configured formatter.
 * If parsing fails, the raw string value is passed through as a STRING field with a
 * "bq.parseException" attribute describing the failure.
 *
 * @param field the BigQuery schema field describing the value
 * @param value the BigQuery field value to convert
 * @return the converted Field (never null for the paths visible here)
 */
public Field fromPrimitiveField(com.google.cloud.bigquery.Field field, FieldValue value) {
  Field.Type type = asRecordFieldType(field);
  Field f;
  if (value.isNull()) {
    f = Field.create(type, null);
  } else if (TEMPORAL_TYPES.contains(type)) {
    if (field.getType().getStandardType() == StandardSQLTypeName.TIMESTAMP) {
      // In general only TIMESTAMP should be a Unix epoch value. However, to be certain we test
      // getStandardType() here and process as a string format otherwise.
      f = Field.create(type, value.getTimestampValue() / 1000L); // micro to milli
    } else {
      // Not a timestamp value. Other BigQuery temporal types come as string representations.
      try {
        String dval = value.getStringValue();
        if (dval != null) {
          dval = stripMicrosec.matcher(dval).replaceAll("$1"); // strip out microseconds, for milli precision
          f = Field.create(type, dateTimeFormatter.apply(field).parse(dval));
        } else {
          // Guard: DateFormat.parse(null) throws NullPointerException, which the
          // ParseException catch below would NOT handle and would escape this method.
          f = Field.create(type, null);
        }
      } catch (ParseException ex) {
        // In case of failed date/time parsing, pass the raw string value through instead.
        LOG.error(String.format("Unable to convert BigQuery field type %s to field type %s", field.getType().toString(), type.toString()), ex);
        f = Field.create(Field.Type.STRING, value.getStringValue()); // fall back to the raw string
        // Record the parse error as a field attribute so downstream stages can inspect it.
        f.setAttribute("bq.parseException", String.format("%s using format %s", ex.getMessage(), dateTimeFormatter.apply(field).toPattern()) );
      }
    }
    // Preserve the original raw value alongside whatever representation was produced above.
    if (f != null) f.setAttribute("bq.fullValue", value.getStringValue());
  } else if (type == Field.Type.BYTE_ARRAY) {
    f = Field.create(type, value.getBytesValue());
  } else {
    f = Field.create(type, value.getValue());
  }
  return f;
}
 
Example 2
/**
 * Attempts to coerce a single JSON value to match the declared BigQuery column type.
 *
 * <p>Returns the (possibly transformed) node when the value is usable for the column, or
 * {@link Optional#empty()} when it cannot be coerced and should be dropped.
 */
private Optional<JsonNode> coerceSingleValueToBqType(JsonNode node, Field field) {
  LegacySQLTypeName fieldType = field.getType();
  String typeName = fieldType.name();

  if (fieldType == LegacySQLTypeName.STRING) {
    // Values already textual pass through untouched; anything else is JSON-ified.
    // We have many fields that we expect to be coerced to string (histograms, userPrefs,
    // etc.) so no point in maintaining a counter here as it will quickly reach many billions.
    return node.isTextual()
        ? Optional.of(node)
        : Optional.of(TextNode.valueOf(Json.asString(node)));
  }

  // Our BigQuery schemas use Standard SQL type names, but the BQ API expects legacy SQL
  // type names, so we end up with technically invalid types of INT64 that we need to
  // check for.
  if (fieldType == LegacySQLTypeName.INTEGER || StandardSQLTypeName.INT64.name().equals(typeName)) {
    if (node.isInt() || node.isLong()) {
      return Optional.of(node);
    }
    if (node.isBoolean()) {
      incrementCoercedToInt();
      // We assume that false is equivalent to zero and true to 1.
      return Optional.of(IntNode.valueOf(node.asBoolean() ? 1 : 0));
    }
    incrementNotCoercedToInt();
    return Optional.empty();
  }

  // Same Standard-vs-legacy mismatch as above: technically invalid BOOL must also match.
  if (fieldType == LegacySQLTypeName.BOOLEAN || StandardSQLTypeName.BOOL.name().equals(typeName)) {
    if (node.isBoolean()) {
      return Optional.of(node);
    }
    incrementNotCoercedToBool();
    return Optional.empty();
  }

  // All other column types: pass the value through unchanged.
  return Optional.of(node);
}
 
Example 3
Source Project: DataflowTemplates   Source File: BigQuerySchemaUtils.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Translates a Beam schema field into a BigQuery client-library {@link Field}.
 *
 * @param beamField the Beam field to translate
 * @return a BigQuery Field with the same name and the mapped Standard SQL type
 * @throws IllegalArgumentException for MAP/ROW/ARRAY/LOGICAL_TYPE and any other unmapped type
 */
private static Field beamFieldToBigQueryClientField(
    org.apache.beam.sdk.schemas.Schema.Field beamField) {
  TypeName beamTypeName = beamField.getType().getTypeName();
  StandardSQLTypeName bigQueryTypeName;
  if (beamTypeName == TypeName.BYTES || beamTypeName == TypeName.BYTE) {
    bigQueryTypeName = StandardSQLTypeName.BYTES;
  } else if (beamTypeName == TypeName.FLOAT || beamTypeName == TypeName.DOUBLE) {
    bigQueryTypeName = StandardSQLTypeName.FLOAT64;
  } else if (beamTypeName == TypeName.INT16
      || beamTypeName == TypeName.INT32
      || beamTypeName == TypeName.INT64) {
    bigQueryTypeName = StandardSQLTypeName.INT64;
  } else if (beamTypeName == TypeName.STRING) {
    bigQueryTypeName = StandardSQLTypeName.STRING;
  } else if (beamTypeName == TypeName.DATETIME) {
    bigQueryTypeName = StandardSQLTypeName.DATETIME;
  } else if (beamTypeName == TypeName.BOOLEAN) {
    bigQueryTypeName = StandardSQLTypeName.BOOL;
  } else if (beamTypeName == TypeName.DECIMAL) {
    bigQueryTypeName = StandardSQLTypeName.NUMERIC;
  } else {
    // In the first version of this solution, we will not support sub-fields of type
    // MAP/ROW/ARRAY/LOGICAL_TYPE. These may become necessary as the solution expands to other
    // databases.
    throw new IllegalArgumentException(
        String.format("Unsupported field type: %s", beamTypeName));
  }
  return Field.newBuilder(beamField.getName(), bigQueryTypeName).build();
}
 
Example 4
Source Project: feast   Source File: TypeUtil.java    License: Apache License 2.0 2 votes vote down vote up
/**
 * Maps a Feast {@link feast.proto.types.ValueProto.ValueType} onto its corresponding BigQuery
 * {@link StandardSQLTypeName}.
 *
 * @param valueType the Feast value type to map
 * @return the matching {@link StandardSQLTypeName}, or {@code null} if the type has no mapping
 */
public static StandardSQLTypeName toStandardSqlType(ValueProto.ValueType.Enum valueType) {
  StandardSQLTypeName mapped = VALUE_TYPE_TO_STANDARD_SQL_TYPE.get(valueType);
  return mapped;
}