Java Code Examples for org.apache.calcite.sql.type.SqlTypeName#TIMESTAMP_WITH_LOCAL_TIME_ZONE

The following examples show how to use org.apache.calcite.sql.type.SqlTypeName#TIMESTAMP_WITH_LOCAL_TIME_ZONE. Each example names its original project and source file.
Example 1
Source File: DruidSqlCastConverter.java    From calcite with Apache License 2.0
public static String dateTimeFormatString(final SqlTypeName sqlTypeName) {
  if (sqlTypeName == SqlTypeName.DATE) {
    return "yyyy-MM-dd";
  } else if (sqlTypeName == SqlTypeName.TIMESTAMP) {
    return "yyyy-MM-dd HH:mm:ss";
  } else if (sqlTypeName == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
    return "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
  } else {
    return null;
  }
}
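
A quick usage sketch (not from the Calcite sources; it assumes DruidSqlCastConverter sits in org.apache.calcite.adapter.druid and that the method is public static, as shown above). TIMESTAMP_WITH_LOCAL_TIME_ZONE gets the ISO-8601 'Z'-suffixed pattern, while unsupported types fall through to null:

import org.apache.calcite.adapter.druid.DruidSqlCastConverter;
import org.apache.calcite.sql.type.SqlTypeName;

public class DateTimeFormatStringDemo {
  public static void main(String[] args) {
    // ISO-8601 pattern with an explicit 'Z' suffix for TIMESTAMP WITH LOCAL TIME ZONE
    System.out.println(DruidSqlCastConverter.dateTimeFormatString(
        SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE));  // yyyy-MM-dd'T'HH:mm:ss.SSS'Z'
    // Plain DATE uses the SQL-style pattern
    System.out.println(DruidSqlCastConverter.dateTimeFormatString(
        SqlTypeName.DATE));                            // yyyy-MM-dd
    // Any other type yields null
    System.out.println(DruidSqlCastConverter.dateTimeFormatString(
        SqlTypeName.VARCHAR));                         // null
  }
}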
 
Example 2
Source File: DruidJsonFilter.java    From calcite with Apache License 2.0
/**
 * @param rexNode    the RexNode to translate to its Druid literal equivalent
 * @param rowType    the row type associated with the RexNode
 * @param druidQuery the Druid query
 *
 * @return the Druid literal as a string, or null if the literal cannot be
 * translated to a valid Druid equivalent
 */
@Nullable
private static String toDruidLiteral(RexNode rexNode, RelDataType rowType,
    DruidQuery druidQuery) {
  final String val;
  final RexLiteral rhsLiteral = (RexLiteral) rexNode;
  if (SqlTypeName.NUMERIC_TYPES.contains(rhsLiteral.getTypeName())) {
    val = String.valueOf(RexLiteral.value(rhsLiteral));
  } else if (SqlTypeName.CHAR_TYPES.contains(rhsLiteral.getTypeName())) {
    val = String.valueOf(RexLiteral.stringValue(rhsLiteral));
  } else if (SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE == rhsLiteral.getTypeName()
      || SqlTypeName.TIMESTAMP == rhsLiteral.getTypeName()
      || SqlTypeName.DATE == rhsLiteral.getTypeName()) {
    Long millisSinceEpoch = DruidDateTimeUtils.literalValue(rexNode);
    if (millisSinceEpoch == null) {
      throw new AssertionError(
          "Cannot translate Literal" + rexNode + " of type "
              + rhsLiteral.getTypeName() + " to TimestampString");
    }
    val = DATE_FORMATTER.format(millisSinceEpoch);
  } else {
    // Don't know how to filter on this kind of literal.
    val = null;
  }
  return val;
}
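
A companion sketch (not part of DruidJsonFilter) showing how a RexLiteral whose type name is TIMESTAMP_WITH_LOCAL_TIME_ZONE might be built, i.e. the kind of literal that takes the millis-since-epoch branch above. It assumes RexBuilder.makeTimestampWithLocalTimeZoneLiteral, which recent Calcite releases provide:

import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.TimestampString;

public class TimestampLiteralDemo {
  public static void main(String[] args) {
    final RexBuilder rexBuilder =
        new RexBuilder(new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT));
    // Literal of type TIMESTAMP(0) WITH LOCAL TIME ZONE
    final RexLiteral literal = rexBuilder.makeTimestampWithLocalTimeZoneLiteral(
        new TimestampString("2020-01-01 00:00:00"), 0);
    // toDruidLiteral branches on this type name before formatting millis since epoch
    System.out.println(literal.getTypeName());  // expected: TIMESTAMP_WITH_LOCAL_TIME_ZONE
    System.out.println(
        literal.getTypeName() == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE);
  }
}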
 
Example 3
Source File: ExtractOperatorConversion.java    From calcite with Apache License 2.0
@Override public String toDruidExpression(
    RexNode rexNode, RelDataType rowType, DruidQuery query) {

  final RexCall call = (RexCall) rexNode;
  final RexLiteral flag = (RexLiteral) call.getOperands().get(0);
  final TimeUnitRange calciteUnit = (TimeUnitRange) flag.getValue();
  final RexNode arg = call.getOperands().get(1);

  final String input = DruidExpressions.toDruidExpression(arg, rowType, query);
  if (input == null) {
    return null;
  }

  final String druidUnit = EXTRACT_UNIT_MAP.get(calciteUnit);
  if (druidUnit == null) {
    return null;
  }

  final TimeZone tz =
      arg.getType().getSqlTypeName() == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE
          ? TimeZone.getTimeZone(query.getConnectionConfig().timeZone())
          : DateTimeUtils.UTC_ZONE;
  return DruidExpressions.applyTimeExtract(input, druidUnit, tz);
}
 
Example 4
Source File: SqlTimestampType.java    From flink with Apache License 2.0
public SqlTypeName getSqlTypeName() {
	if (withLocalTimeZone) {
		return SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE;
	} else {
		return SqlTypeName.TIMESTAMP;
	}
}
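
For context, a SqlTypeName such as the one returned above is typically passed to a RelDataTypeFactory to build the actual type. A minimal sketch using Calcite's stock SqlTypeFactoryImpl (Flink would use its own type factory in practice):

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class TimestampTypeDemo {
  public static void main(String[] args) {
    final RelDataTypeFactory typeFactory =
        new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    // TIMESTAMP(3) WITH LOCAL TIME ZONE
    final RelDataType type = typeFactory.createSqlType(
        SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE, 3);
    System.out.println(type.getSqlTypeName());     // TIMESTAMP_WITH_LOCAL_TIME_ZONE
    System.out.println(type.getFullTypeString());
  }
}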
 
Example 5
Source File: DruidSqlCastConverter.java    From calcite with Apache License 2.0
private static String castCharToDateTime(
    TimeZone timeZone,
    String operand,
    final SqlTypeName toType, String format) {
  // Cast strings to date times by parsing them from SQL format.
  final String timestampExpression = DruidExpressions.functionCall(
      "timestamp_parse",
      ImmutableList.of(
          operand,
          DruidExpressions.stringLiteral(format),
          DruidExpressions.stringLiteral(timeZone.getID())));

  if (toType == SqlTypeName.DATE) {
    // When casting to DATE we need to floor to the day first
    return DruidExpressions.applyTimestampFloor(
        timestampExpression,
        Period.days(1).toString(),
        "",
        timeZone);
  } else if (toType == SqlTypeName.TIMESTAMP
      || toType == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
    return timestampExpression;
  } else {
    throw new IllegalStateException(
        DruidQuery.format("Unsupported DateTime type[%s]", toType));
  }
}
 
Example 6
Source File: DruidQuery.java    From calcite with Apache License 2.0
/** Checks whether UTC is needed for DATE, TIMESTAMP and TIMESTAMP WITH LOCAL TIME ZONE types. */
private static boolean needUtcTimeExtract(RexNode rexNode) {
  return rexNode.getType().getSqlTypeName() == SqlTypeName.DATE
      || rexNode.getType().getSqlTypeName() == SqlTypeName.TIMESTAMP
      || rexNode.getType().getSqlTypeName()
      == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE;
}
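
Since needUtcTimeExtract is private, the standalone sketch below repeats the same check against an input ref built with a RexBuilder; an input ref typed as TIMESTAMP WITH LOCAL TIME ZONE satisfies it:

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class NeedUtcTimeExtractDemo {
  public static void main(String[] args) {
    final RexBuilder rexBuilder =
        new RexBuilder(new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT));
    final RelDataType type = rexBuilder.getTypeFactory()
        .createSqlType(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE, 3);
    final RexNode ref = rexBuilder.makeInputRef(type, 0);
    // The same check that needUtcTimeExtract performs above
    final boolean needUtc = ref.getType().getSqlTypeName() == SqlTypeName.DATE
        || ref.getType().getSqlTypeName() == SqlTypeName.TIMESTAMP
        || ref.getType().getSqlTypeName()
        == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE;
    System.out.println(needUtc);  // true
  }
}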
 
Example 7
Source File: DruidDateTimeUtils.java    From calcite with Apache License 2.0
/**
 * Infers granularity from a time unit.
 * It supports {@code FLOOR(<time> TO <timeunit>)}
 * and {@code EXTRACT(<timeunit> FROM <time>)}.
 * Returns null if it cannot be inferred.
 *
 * @param node the Rex node
 * @return the granularity, or null if it cannot be inferred
 */
@Nullable
public static Granularity extractGranularity(RexNode node, String timeZone) {
  final int valueIndex;
  final int flagIndex;

  if (TimeExtractionFunction.isValidTimeExtract(node)) {
    flagIndex = 0;
    valueIndex = 1;
  } else if (TimeExtractionFunction.isValidTimeFloor(node)) {
    valueIndex = 0;
    flagIndex = 1;
  } else {
    // We can only infer granularity from floor and extract.
    return null;
  }
  final RexCall call = (RexCall) node;
  final RexNode value = call.operands.get(valueIndex);
  final RexLiteral flag = (RexLiteral) call.operands.get(flagIndex);
  final TimeUnitRange timeUnit = (TimeUnitRange) flag.getValue();

  final RelDataType valueType = value.getType();
  if (valueType.getSqlTypeName() == SqlTypeName.DATE
      || valueType.getSqlTypeName() == SqlTypeName.TIMESTAMP) {
    // Use 'UTC' for DATE/TIMESTAMP types, as Druid needs time-zone information
    return Granularities.createGranularity(timeUnit, "UTC");
  } else if (valueType.getSqlTypeName() == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
    return Granularities.createGranularity(timeUnit, timeZone);
  }
  // Type not recognized
  return null;
}
 
Example 8
Source File: FloorOperatorConversion.java    From calcite with Apache License 2.0
@Nullable
@Override public String toDruidExpression(RexNode rexNode, RelDataType rowType,
    DruidQuery druidQuery) {
  final RexCall call = (RexCall) rexNode;
  final RexNode arg = call.getOperands().get(0);
  final String druidExpression = DruidExpressions.toDruidExpression(
      arg,
      rowType,
      druidQuery);
  if (druidExpression == null) {
    return null;
  } else if (call.getOperands().size() == 1) {
    // FLOOR(expr)
    return DruidQuery.format("floor(%s)", druidExpression);
  } else if (call.getOperands().size() == 2) {
    // FLOOR(expr TO timeUnit)
    final TimeZone tz;
    if (arg.getType().getSqlTypeName() == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
      tz = TimeZone.getTimeZone(druidQuery.getConnectionConfig().timeZone());
    } else {
      tz = DateTimeUtils.UTC_ZONE;
    }
    final Granularity granularity = DruidDateTimeUtils
        .extractGranularity(call, tz.getID());
    if (granularity == null) {
      return null;
    }
    String isoPeriodFormat = DruidDateTimeUtils.toISOPeriodFormat(granularity.getType());
    if (isoPeriodFormat == null) {
      return null;
    }
    return DruidExpressions.applyTimestampFloor(
        druidExpression,
        isoPeriodFormat,
        "",
        tz);
  } else {
    return null;
  }
}
 
Example 9
Source File: CeilOperatorConversion.java    From calcite with Apache License 2.0
@Nullable
@Override public String toDruidExpression(RexNode rexNode, RelDataType rowType,
    DruidQuery query) {
  final RexCall call = (RexCall) rexNode;
  final RexNode arg = call.getOperands().get(0);
  final String druidExpression = DruidExpressions.toDruidExpression(
      arg,
      rowType,
      query);
  if (druidExpression == null) {
    return null;
  } else if (call.getOperands().size() == 1) {
    // CEIL(expr)
    return DruidQuery.format("ceil(%s)", druidExpression);
  } else if (call.getOperands().size() == 2) {
    // CEIL(expr TO timeUnit)
    final RexLiteral flag = (RexLiteral) call.getOperands().get(1);
    final TimeUnitRange timeUnit = (TimeUnitRange) flag.getValue();
    final Granularity.Type type = DruidDateTimeUtils.toDruidGranularity(timeUnit);
    if (type == null) {
      // Unknown granularity, bail out
      return null;
    }
    String isoPeriodFormat = DruidDateTimeUtils.toISOPeriodFormat(type);
    if (isoPeriodFormat == null) {
      return null;
    }
    final TimeZone tz;
    if (arg.getType().getSqlTypeName() == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
      tz = TimeZone.getTimeZone(query.getConnectionConfig().timeZone());
    } else {
      tz = DateTimeUtils.UTC_ZONE;
    }
    return DruidExpressions.applyTimestampCeil(
        druidExpression, isoPeriodFormat, "", tz);
  } else {
    return null;
  }
}
 
Example 10
Source File: DruidQuery.java    From calcite with Apache License 2.0
/**
 * @param rexNode    leaf input ref to a Druid column
 * @param rowType    row type
 * @param druidQuery Druid query
 *
 * @return {@link Pair} of column name and extraction function on top of the
 * input ref, or {@code Pair.of(null, null)} when it cannot be translated to a
 * valid Druid column
 */
protected static Pair<String, ExtractionFunction> toDruidColumn(RexNode rexNode,
    RelDataType rowType, DruidQuery druidQuery) {
  final String columnName;
  final ExtractionFunction extractionFunction;
  final Granularity granularity;
  switch (rexNode.getKind()) {
  case INPUT_REF:
    columnName = extractColumnName(rexNode, rowType, druidQuery);
    if (needUtcTimeExtract(rexNode)) {
      extractionFunction = TimeExtractionFunction.createDefault(
          DateTimeUtils.UTC_ZONE.getID());
    } else {
      extractionFunction = null;
    }
    break;
  case EXTRACT:
    granularity = DruidDateTimeUtils
        .extractGranularity(rexNode, druidQuery.getConnectionConfig().timeZone());
    if (granularity == null) {
      // unknown Granularity
      return Pair.of(null, null);
    }
    if (!TimeExtractionFunction.isValidTimeExtract((RexCall) rexNode)) {
      return Pair.of(null, null);
    }
    RexNode extractValueNode = ((RexCall) rexNode).getOperands().get(1);
    if (extractValueNode.getType().getSqlTypeName() == SqlTypeName.DATE
        || extractValueNode.getType().getSqlTypeName() == SqlTypeName.TIMESTAMP) {
      // Use 'UTC' at the extraction level
      extractionFunction =
          TimeExtractionFunction.createExtractFromGranularity(
              granularity, DateTimeUtils.UTC_ZONE.getID());
      columnName = extractColumnName(extractValueNode, rowType, druidQuery);
    } else if (extractValueNode.getType().getSqlTypeName()
        == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
      // Use local time zone at the extraction level
      extractionFunction =
          TimeExtractionFunction.createExtractFromGranularity(
              granularity, druidQuery.getConnectionConfig().timeZone());
      columnName = extractColumnName(extractValueNode, rowType, druidQuery);
    } else {
      return Pair.of(null, null);
    }
    break;
  case FLOOR:
    granularity = DruidDateTimeUtils
        .extractGranularity(rexNode, druidQuery.getConnectionConfig().timeZone());
    if (granularity == null) {
      // unknown Granularity
      return Pair.of(null, null);
    }
    if (!TimeExtractionFunction.isValidTimeFloor((RexCall) rexNode)) {
      return Pair.of(null, null);
    }
    RexNode floorValueNode = ((RexCall) rexNode).getOperands().get(0);
    if (needUtcTimeExtract(floorValueNode)) {
      // Use 'UTC' at the extraction level, since all datetime types
      // are represented in 'UTC'
      extractionFunction =
          TimeExtractionFunction.createFloorFromGranularity(
              granularity, DateTimeUtils.UTC_ZONE.getID());
      columnName = extractColumnName(floorValueNode, rowType, druidQuery);
    } else {
      return Pair.of(null, null);
    }
    break;
  case CAST:
    // Case: a cast over an input ref. Check that the cast is valid.
    if (!isValidLeafCast(rexNode)) {
      return Pair.of(null, null);
    }
    RexNode operand0 = ((RexCall) rexNode).getOperands().get(0);
    columnName =
        extractColumnName(operand0, rowType, druidQuery);
    if (needUtcTimeExtract(rexNode)) {
      // Case: CAST to TIME/DATE; make sure we have a valid extraction function
      extractionFunction = TimeExtractionFunction.translateCastToTimeExtract(rexNode,
          TimeZone.getTimeZone(druidQuery.getConnectionConfig().timeZone()));
      if (extractionFunction == null) {
        // No extraction function means the cast is not valid, so bail out
        return Pair.of(null, null);
      }
    } else {
      extractionFunction = null;
    }
    break;
  default:
    return Pair.of(null, null);
  }
  return Pair.of(columnName, extractionFunction);
}