org.apache.calcite.sql.SqlNode Java Examples

The following examples show how to use org.apache.calcite.sql.SqlNode. They are drawn from several open source projects; the source file, project, and license are listed above each example.
Example #1
Source File: SqlCase.java    From calcite with Apache License 2.0
/**
 * Creates a call to the switched form of the case operator, viz:
 *
 * <blockquote><code>CASE value<br>
 * WHEN whenList[0] THEN thenList[0]<br>
 * WHEN whenList[1] THEN thenList[1]<br>
 * ...<br>
 * ELSE elseClause<br>
 * END</code></blockquote>
 */
public static SqlCase createSwitched(SqlParserPos pos, SqlNode value,
    SqlNodeList whenList, SqlNodeList thenList, SqlNode elseClause) {
  if (null != value) {
    List<SqlNode> list = whenList.getList();
    for (int i = 0; i < list.size(); i++) {
      SqlNode e = list.get(i);
      final SqlCall call;
      if (e instanceof SqlNodeList) {
        call = SqlStdOperatorTable.IN.createCall(pos, value, e);
      } else {
        call = SqlStdOperatorTable.EQUALS.createCall(pos, value, e);
      }
      list.set(i, call);
    }
  }

  if (null == elseClause) {
    elseClause = SqlLiteral.createNull(pos);
  }

  return new SqlCase(pos, null, whenList, thenList, elseClause);
}
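
A minimal usage sketch (not part of the original SqlCase.java; the column x and the literal values are invented) showing how createSwitched rewrites each WHEN operand into an equality comparison:

import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.fun.SqlCase;
import org.apache.calcite.sql.parser.SqlParserPos;

public class SwitchedCaseSketch {
  public static void main(String[] args) {
    SqlParserPos pos = SqlParserPos.ZERO;
    // Build the pieces of: CASE x WHEN 1 THEN 'one' ELSE 'other' END
    SqlIdentifier value = new SqlIdentifier("x", pos);
    SqlNodeList whenList = new SqlNodeList(pos);
    whenList.add(SqlLiteral.createExactNumeric("1", pos));
    SqlNodeList thenList = new SqlNodeList(pos);
    thenList.add(SqlLiteral.createCharString("one", pos));

    SqlCase searched = SqlCase.createSwitched(pos, value, whenList, thenList,
        SqlLiteral.createCharString("other", pos));
    // The WHEN operand has been rewritten to x = 1, i.e. a searched CASE.
    System.out.println(searched);
  }
}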
 
Example #2
Source File: RelToSqlConverterUtil.java    From calcite with Apache License 2.0
/**
 * For usage of TRIM, LTRIM and RTRIM in Hive, see
 * <a href="https://cwiki.apache.org/confluence/display/Hive/LanguageManual+UDF">Hive UDF usage</a>.
 */
public static void unparseHiveTrim(
    SqlWriter writer,
    SqlCall call,
    int leftPrec,
    int rightPrec) {
  final SqlLiteral valueToTrim = call.operand(1);
  if (valueToTrim.toValue().matches("\\s+")) {
    unparseTrimWithSpace(writer, call, leftPrec, rightPrec);
  } else {
    // SELECT TRIM(both 'A' from "ABC") -> SELECT REGEXP_REPLACE("ABC", '^(A)*', '')
    final SqlLiteral trimFlag = call.operand(0);
    final SqlCharStringLiteral regexNode =
        createRegexPatternLiteral(call.operand(1), trimFlag);
    final SqlCharStringLiteral blankLiteral =
        SqlLiteral.createCharString("", call.getParserPosition());
    final SqlNode[] trimOperands = new SqlNode[] { call.operand(2), regexNode, blankLiteral };
    final SqlCall regexReplaceCall = REGEXP_REPLACE.createCall(SqlParserPos.ZERO, trimOperands);
    regexReplaceCall.unparse(writer, leftPrec, rightPrec);
  }
}
 
Example #3
Source File: SqlAdvisor.java    From Bats with Apache License 2.0
/**
 * Attempts to parse a SQL statement, adding any syntax error that is found
 * to the errorList. This implementation uses {@link SqlParser}. Subclasses
 * can re-implement this with a different parser implementation.
 *
 * @param sql       A user-input sql statement to be parsed
 * @param errorList A {@link List} to which errors are added
 * @return {@link SqlNode} that is the root of the parse tree, or null if the
 * sql is not valid
 */
protected SqlNode collectParserError(
    String sql,
    List<ValidateErrorInfo> errorList) {
  try {
    return parseQuery(sql);
  } catch (SqlParseException e) {
    ValidateErrorInfo errInfo =
        new ValidateErrorInfo(
            e.getPos(),
            e.getMessage());

    // parser only returns 1 exception now
    errorList.add(errInfo);
    return null;
  }
}
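
A standalone sketch of the same error-collection pattern outside SqlAdvisor, using SqlParser directly; the SQL text is deliberately invalid and invented for illustration:

import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;

public class ParseErrorSketch {
  public static void main(String[] args) {
    String sql = "select from emp";   // missing select list
    try {
      SqlNode node = SqlParser.create(sql).parseQuery();
      System.out.println("parsed: " + node.getKind());
    } catch (SqlParseException e) {
      // The same position and message that SqlAdvisor wraps into ValidateErrorInfo.
      System.out.println(e.getPos() + ": " + e.getMessage());
    }
  }
}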
 
Example #4
Source File: HiveDDLUtils.java    From flink with Apache License 2.0
private static SqlNodeList checkReservedProperties(Set<String> reservedProperties, SqlNodeList properties,
		String metaType) throws ParseException {
	if (properties == null) {
		return null;
	}
	Set<String> match = new HashSet<>();
	for (SqlNode node : properties) {
		if (node instanceof SqlTableOption) {
			String key = ((SqlTableOption) node).getKeyString();
			if (reservedProperties.contains(key)) {
				match.add(key);
			}
		}
	}
	if (!match.isEmpty()) {
		throw new ParseException(String.format(
				"Properties %s are reserved and shouldn't be used for Hive %s", match, metaType));
	}
	return properties;
}
 
Example #5
Source File: AccelCreateReflectionHandler.java    From dremio-oss with Apache License 2.0
@Override
public List<SimpleCommandResult> toResult(String sql, SqlNode sqlNode) throws Exception {
  final SqlCreateReflection addLayout = SqlNodeUtil.unwrap(sqlNode, SqlCreateReflection.class);
  final TableWithPath table = SchemaUtilities.verify(catalog, addLayout.getTblName());
  SqlIdentifier identifier = addLayout.getName();
  String name;
  if(identifier != null) {
    name = identifier.toString();
  } else {
    name = "Unnamed-" + ThreadLocalRandom.current().nextLong();
  }

  final LayoutDefinition layout = new LayoutDefinition(name,
      addLayout.isRaw() ? LayoutDefinition.Type.RAW : LayoutDefinition.Type.AGGREGATE,
      table.qualifyColumns(addLayout.getDisplayList()),
      qualifyColumnsWithGranularity(table.getTable(), addLayout.getDimensionList()),
      qualifyColumnsWithMeasures(table.getTable(), addLayout.getMeasureList()),
      table.qualifyColumns(addLayout.getSortList()),
      table.qualifyColumns(addLayout.getDistributionList()),
      table.qualifyColumns(addLayout.getPartitionList()),
      addLayout.getPartitionDistributionStrategy()
  );
  accel.addLayout(table.getPath(), layout, reflectionContext);
  return Collections.singletonList(SimpleCommandResult.successful("Layout added."));
}
 
Example #6
Source File: AbstractTypeCoercion.java    From calcite with Apache License 2.0
/**
 * Updates the inferred type for a SqlNode.
 */
protected void updateInferredType(SqlNode node, RelDataType type) {
  validator.setValidatedNodeType(node, type);
  final SqlValidatorNamespace namespace = validator.getNamespace(node);
  if (namespace != null) {
    namespace.setType(type);
  }
}
 
Example #7
Source File: SqlValidatorUtil.java    From calcite with Apache License 2.0
public SqlNode visit(SqlNodeList list) {
  SqlNodeList copy = new SqlNodeList(list.getParserPosition());
  for (SqlNode node : list) {
    copy.add(node.accept(this));
  }
  return copy;
}
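
The method above is part of a copying visitor; a hedged sketch of applying a similar SqlShuttle to a parsed tree (the class name and rewrite rule are invented):

import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.util.SqlShuttle;

public class IdentifierRewriteSketch {
  public static void main(String[] args) throws Exception {
    SqlNode query = SqlParser.create("select empno from emp").parseQuery();
    // Replace every identifier named emp with EMPLOYEES; other nodes are visited recursively.
    SqlNode rewritten = query.accept(new SqlShuttle() {
      @Override public SqlNode visit(SqlIdentifier id) {
        return id.toString().equalsIgnoreCase("emp")
            ? new SqlIdentifier("EMPLOYEES", id.getParserPosition())
            : id;
      }
    });
    System.out.println(rewritten);
  }
}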
 
Example #8
Source File: PlannerImpl.java    From calcite with Apache License 2.0
@Override public RelRoot expandView(RelDataType rowType, String queryString,
    List<String> schemaPath, List<String> viewPath) {
  if (planner == null) {
    ready();
  }
  SqlParser parser = SqlParser.create(queryString, parserConfig);
  SqlNode sqlNode;
  try {
    sqlNode = parser.parseQuery();
  } catch (SqlParseException e) {
    throw new RuntimeException("parse failed", e);
  }

  final CalciteCatalogReader catalogReader =
      createCatalogReader().withSchemaPath(schemaPath);
  final SqlValidator validator = createSqlValidator(catalogReader);

  final RexBuilder rexBuilder = createRexBuilder();
  final RelOptCluster cluster = RelOptCluster.create(planner, rexBuilder);
  final SqlToRelConverter.Config config = SqlToRelConverter
      .configBuilder()
      .withConfig(sqlToRelConverterConfig)
      .withTrimUnusedFields(false)
      .build();
  final SqlToRelConverter sqlToRelConverter =
      new SqlToRelConverter(this, validator,
          catalogReader, cluster, convertletTable, config);

  final RelRoot root =
      sqlToRelConverter.convertQuery(sqlNode, true, false);
  final RelRoot root2 =
      root.withRel(sqlToRelConverter.flattenTypes(root.rel, true));
  final RelBuilder relBuilder =
      config.getRelBuilderFactory().create(cluster, null);
  return root2.withRel(
      RelDecorrelator.decorrelateQuery(root.rel, relBuilder));
}
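
The snippet above is Calcite-internal; a hedged sketch of the same parse/validate/convert pipeline through the public Planner facade (the schema setup and table name are assumptions):

import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.Planner;

public class PlannerPipelineSketch {
  public static void main(String[] args) throws Exception {
    SchemaPlus rootSchema = Frameworks.createRootSchema(true);
    // Assumes tables (e.g. EMPS) have already been registered on rootSchema.
    FrameworkConfig config = Frameworks.newConfigBuilder()
        .defaultSchema(rootSchema)
        .build();
    Planner planner = Frameworks.getPlanner(config);
    SqlNode parsed = planner.parse("select * from emps");
    SqlNode validated = planner.validate(parsed);
    RelRoot root = planner.rel(validated);
    System.out.println(RelOptUtil.toString(root.rel));
  }
}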
 
Example #9
Source File: SqlParserUtil.java    From calcite with Apache License 2.0
private static SqlNode convert(PrecedenceClimbingParser.Token token) {
  switch (token.type) {
  case ATOM:
    return (SqlNode) token.o;
  case CALL:
    final PrecedenceClimbingParser.Call call =
        (PrecedenceClimbingParser.Call) token;
    final List<SqlNode> list = new ArrayList<>();
    for (PrecedenceClimbingParser.Token arg : call.args) {
      list.add(convert(arg));
    }
    final ToTreeListItem item = (ToTreeListItem) call.op.o;
    if (item.op == SqlStdOperatorTable.UNARY_MINUS
        && list.size() == 1
        && list.get(0) instanceof SqlNumericLiteral) {
      return SqlLiteral.createNegative((SqlNumericLiteral) list.get(0),
          item.pos.plusAll(list));
    }
    if (item.op == SqlStdOperatorTable.UNARY_PLUS
        && list.size() == 1
        && list.get(0) instanceof SqlNumericLiteral) {
      return list.get(0);
    }
    return item.op.createCall(item.pos.plusAll(list), list);
  default:
    throw new AssertionError(token);
  }
}
 
Example #10
Source File: ProcedureNamespace.java    From Bats with Apache License 2.0
ProcedureNamespace(
    SqlValidatorImpl validator,
    SqlValidatorScope scope,
    SqlCall call,
    SqlNode enclosingNode) {
  super(validator, enclosingNode);
  this.scope = scope;
  this.call = call;
}
 
Example #11
Source File: SqlImplementor.java    From Bats with Apache License 2.0
public void addSelect(List<SqlNode> selectList, SqlNode node, RelDataType rowType) {
    String name = rowType.getFieldNames().get(selectList.size());
    String alias = SqlValidatorUtil.getAlias(node, -1);
    if (alias == null || !alias.equals(name)) {
        node = SqlStdOperatorTable.AS.createCall(POS, node, new SqlIdentifier(name, POS));
    }
    selectList.add(node);
}
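
A small sketch (the identifiers are invented) of the AS call this helper creates when a node's alias does not match the expected field name:

import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;

public class AliasSketch {
  public static void main(String[] args) {
    SqlParserPos pos = SqlParserPos.ZERO;
    SqlNode expr = new SqlIdentifier("DEPT_NO", pos);
    // Unparses roughly as `DEPT_NO` AS `DEPTNO`
    SqlNode aliased = SqlStdOperatorTable.AS.createCall(pos, expr,
        new SqlIdentifier("DEPTNO", pos));
    System.out.println(aliased);
  }
}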
 
Example #12
Source File: SqlMultisetValueConstructor.java    From Bats with Apache License 2.0
public void unparse(
    SqlWriter writer,
    SqlCall call,
    int leftPrec,
    int rightPrec) {
  writer.keyword(getName()); // "MULTISET" or "ARRAY"
  final SqlWriter.Frame frame = writer.startList("[", "]");
  for (SqlNode operand : call.getOperandList()) {
    writer.sep(",");
    operand.unparse(writer, leftPrec, rightPrec);
  }
  writer.endList(frame);
}
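
A hedged sketch of the kind of call this unparse method renders, built with the standard ARRAY value constructor (the element values are invented):

import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;

public class ArrayConstructorSketch {
  public static void main(String[] args) {
    SqlParserPos pos = SqlParserPos.ZERO;
    SqlCall arrayCall = SqlStdOperatorTable.ARRAY_VALUE_CONSTRUCTOR.createCall(pos,
        SqlLiteral.createExactNumeric("1", pos),
        SqlLiteral.createExactNumeric("2", pos),
        SqlLiteral.createExactNumeric("3", pos));
    // The unparse method above renders this as ARRAY[1, 2, 3]
    System.out.println(arrayCall);
  }
}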
 
Example #13
Source File: RichSqlHiveInsert.java    From flink with Apache License 2.0
private static Map<SqlIdentifier, SqlProperty> getPartKeyToSpec(SqlNodeList staticSpec) {
	Map<SqlIdentifier, SqlProperty> res = new HashMap<>();
	if (staticSpec != null) {
		for (SqlNode node : staticSpec) {
			SqlProperty spec = (SqlProperty) node;
			res.put(spec.getKey(), spec);
		}
	}
	return res;
}
 
Example #14
Source File: SqlTrimFunction.java    From Bats with Apache License 2.0
public SqlCall createCall(
    SqlLiteral functionQualifier,
    SqlParserPos pos,
    SqlNode... operands) {
  assert functionQualifier == null;
  switch (operands.length) {
  case 1:
    // This variant occurs when someone writes TRIM(string)
    // as opposed to the sugared syntax TRIM(string FROM string).
    operands = new SqlNode[]{
      Flag.BOTH.symbol(SqlParserPos.ZERO),
      SqlLiteral.createCharString(" ", pos),
      operands[0]
    };
    break;
  case 3:
    assert operands[0] instanceof SqlLiteral
        && ((SqlLiteral) operands[0]).getValue() instanceof Flag;
    if (operands[1] == null) {
      operands[1] = SqlLiteral.createCharString(" ", pos);
    }
    break;
  default:
    throw new IllegalArgumentException(
        "invalid operand count " + Arrays.toString(operands));
  }
  return super.createCall(functionQualifier, pos, operands);
}
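
A minimal sketch of the one-operand variant being normalized (the string literal is invented); createCall fills in the BOTH flag and the default ' ' trim character:

import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;

public class TrimSketch {
  public static void main(String[] args) {
    SqlParserPos pos = SqlParserPos.ZERO;
    // TRIM('  abc  ') is expanded to TRIM(BOTH ' ' FROM '  abc  ')
    SqlCall trim = SqlStdOperatorTable.TRIM.createCall(pos,
        SqlLiteral.createCharString("  abc  ", pos));
    System.out.println(trim);
  }
}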
 
Example #15
Source File: RelToSqlConverter.java    From calcite with Apache License 2.0
@Override public SqlNode visit(SqlIdentifier id) {
  if (tableAlias.equals(id.names.get(0))) {
    int index = tableType.getField(
        id.names.get(1), false, false).getIndex();
    SqlNode selectItem = replaceSource.get(index);
    if (selectItem.getKind() == SqlKind.AS) {
      selectItem = ((SqlCall) selectItem).operand(0);
    }
    return selectItem.clone(id.getParserPosition());
  }
  return id;
}
 
Example #16
Source File: RelToSqlConverter.java    From dremio-oss with Apache License 2.0
/**
 * Converts a list of {@link RexNode} expressions to {@link SqlNode}
 * expressions.
 */
private SqlNodeList exprList(final Context context,
                             List<? extends RexNode> exprs) {
  return new SqlNodeList(
    Lists.transform(exprs,
      (Function<RexNode, SqlNode>) e -> context.toSql(null, e)), POS);
}
 
Example #17
Source File: StandardConvertletTable.java    From calcite with Apache License 2.0
private SqlNode getCastedSqlNode(SqlNode argInput, RelDataType varType,
    SqlParserPos pos, RexNode argRex) {
  SqlNode arg;
  if (argRex != null && !argRex.getType().equals(varType)) {
    arg = SqlStdOperatorTable.CAST.createCall(
        pos, argInput, SqlTypeUtil.convertTypeToSpec(varType));
  } else {
    arg = argInput;
  }
  return arg;
}
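
A hedged sketch of building the same kind of CAST call directly, using a type factory to obtain the target RelDataType (the literal and type are invented):

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.type.SqlTypeUtil;

public class CastSketch {
  public static void main(String[] args) {
    SqlParserPos pos = SqlParserPos.ZERO;
    RelDataType intType =
        new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT).createSqlType(SqlTypeName.INTEGER);
    // CAST('42' AS INTEGER)
    SqlNode cast = SqlStdOperatorTable.CAST.createCall(pos,
        SqlLiteral.createCharString("42", pos),
        SqlTypeUtil.convertTypeToSpec(intType));
    System.out.println(cast);
  }
}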
 
Example #18
Source File: SqlValidatorImpl.java    From flink with Apache License 2.0
public List<List<String>> getFieldOrigins(SqlNode sqlQuery) {
	if (sqlQuery instanceof SqlExplain) {
		return Collections.emptyList();
	}
	final RelDataType rowType = getValidatedNodeType(sqlQuery);
	final int fieldCount = rowType.getFieldCount();
	if (!sqlQuery.isA(SqlKind.QUERY)) {
		return Collections.nCopies(fieldCount, null);
	}
	final List<List<String>> list = new ArrayList<>();
	for (int i = 0; i < fieldCount; i++) {
		list.add(getFieldOrigin(sqlQuery, i));
	}
	return ImmutableNullableList.copyOf(list);
}
 
Example #19
Source File: SqlValidatorImpl.java    From Flink-CEPplus with Apache License 2.0
private boolean addOrExpandField(List<SqlNode> selectItems, Set<String> aliases,
	List<Map.Entry<String, RelDataType>> fields, boolean includeSystemVars,
	SelectScope scope, SqlIdentifier id, RelDataTypeField field) {
	switch (field.getType().getStructKind()) {
		case PEEK_FIELDS:
		case PEEK_FIELDS_DEFAULT:
			final SqlNode starExp = id.plusStar();
			expandStar(
				selectItems,
				aliases,
				fields,
				includeSystemVars,
				scope,
				starExp);
			return true;

		default:
			addToSelectList(
				selectItems,
				aliases,
				fields,
				id,
				scope,
				includeSystemVars);
	}

	return false;
}
 
Example #20
Source File: RichSqlHiveInsert.java    From flink with Apache License 2.0
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
	writer.startList(SqlWriter.FrameTypeEnum.SELECT);
	String insertKeyword = "INSERT INTO";
	if (isUpsert()) {
		insertKeyword = "UPSERT INTO";
	} else if (isOverwrite()) {
		insertKeyword = "INSERT OVERWRITE";
	}
	writer.sep(insertKeyword);
	final int opLeft = getOperator().getLeftPrec();
	final int opRight = getOperator().getRightPrec();
	getTargetTable().unparse(writer, opLeft, opRight);
	if (getTargetColumnList() != null) {
		getTargetColumnList().unparse(writer, opLeft, opRight);
	}
	writer.newlineAndIndent();
	if (allPartKeys != null && allPartKeys.size() > 0) {
		writer.keyword("PARTITION");
		SqlWriter.Frame frame = writer.startList("(", ")");
		for (SqlNode node : allPartKeys) {
			writer.sep(",", false);
			SqlIdentifier partKey = (SqlIdentifier) node;
			SqlProperty spec = partKeyToSpec.get(partKey);
			if (spec != null) {
				spec.unparse(writer, leftPrec, rightPrec);
			} else {
				partKey.unparse(writer, leftPrec, rightPrec);
			}
		}
		writer.endList(frame);
		writer.newlineAndIndent();
	}
	getSource().unparse(writer, 0, 0);
}
 
Example #21
Source File: DremioRelToSqlConverter.java    From dremio-oss with Apache License 2.0
@Override
SqlSelect wrapSelect(SqlNode node) {
  // We override Calcite's wrapSelect to fail fast here. Dremio needs the Result for its wrapSelect implementation,
  // so it implements it as an instance method on DremioRelToSqlConverter.Result as wrapSelectAndPushOrderBy()
  //
  // We need to replace any calls to SqlImplementor#wrapSelect() with
  // DremioRelToSqlConverter.Result#wrapSelectAndPushOrderBy().
  throw new UnsupportedOperationException();
}
 
Example #22
Source File: SqlAnalyzeTable.java    From Bats with Apache License 2.0
public List<String> getFieldNames() {
  if (fieldList == null) {
    return ImmutableList.of();
  }

  List<String> columnNames = Lists.newArrayList();
  for (SqlNode node : fieldList.getList()) {
    columnNames.add(node.toString());
  }
  return columnNames;
}
 
Example #23
Source File: SqlParseUtil.java    From alchemy with Apache License 2.0
private static void parseSource(SqlSelect sqlSelect, List<String> sources, List<String> udfs)
    throws SqlParseException {
    SqlNodeList selectList = sqlSelect.getSelectList();
    SqlNode from = sqlSelect.getFrom();
    SqlNode where = sqlSelect.getWhere();
    SqlNode having = sqlSelect.getHaving();
    parseSelectList(selectList, sources, udfs);
    parseFrom(from, sources, udfs);
    parseFunction(where, udfs);
    parseFunction(having, udfs);
}
 
Example #24
Source File: PlannerImpl.java    From Mycat2 with GNU General Public License v3.0
public SqlNode parse(final Reader reader) throws SqlParseException {
    switch (state) {
        case STATE_0_CLOSED:
        case STATE_1_RESET:
            ready();
    }
    ensure(State.STATE_2_READY);
    SqlParser parser = SqlParser.create(reader, parserConfig);
    SqlNode sqlNode = parser.parseStmt();
    state = State.STATE_3_PARSED;
    return sqlNode;
}
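
A hedged sketch of parsing from a Reader with SqlParser directly, which is what this Planner method wraps internally (the statement text is invented):

import java.io.StringReader;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;

public class ReaderParseSketch {
  public static void main(String[] args) throws Exception {
    SqlParser parser = SqlParser.create(
        new StringReader("insert into emps values (1, 'Fred')"),
        SqlParser.Config.DEFAULT);
    // parseStmt() accepts DML and DDL statements as well as queries.
    SqlNode stmt = parser.parseStmt();
    System.out.println(stmt.getKind());   // INSERT
  }
}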
 
Example #25
Source File: HiveDDLUtils.java    From flink with Apache License 2.0
public static SqlNodeList ensureNonGeneric(SqlNodeList props) throws ParseException {
	for (SqlNode node : props) {
		if (node instanceof SqlTableOption && ((SqlTableOption) node).getKeyString().equalsIgnoreCase(CatalogConfig.IS_GENERIC)) {
			if (!((SqlTableOption) node).getValueString().equalsIgnoreCase("false")) {
				throw new ParseException("Creating generic object with Hive dialect is not allowed");
			}
		}
	}
	return props;
}
 
Example #26
Source File: SqlValidatorImpl.java    From Flink-CEPplus with Apache License 2.0
private List<String> getFieldOrigin(SqlNode sqlQuery, int i) {
	if (sqlQuery instanceof SqlSelect) {
		SqlSelect sqlSelect = (SqlSelect) sqlQuery;
		final SelectScope scope = getRawSelectScope(sqlSelect);
		final List<SqlNode> selectList = scope.getExpandedSelectList();
		final SqlNode selectItem = stripAs(selectList.get(i));
		if (selectItem instanceof SqlIdentifier) {
			final SqlQualified qualified =
				scope.fullyQualify((SqlIdentifier) selectItem);
			SqlValidatorNamespace namespace = qualified.namespace;
			final SqlValidatorTable table = namespace.getTable();
			if (table == null) {
				return null;
			}
			final List<String> origin =
				new ArrayList<>(table.getQualifiedName());
			for (String name : qualified.suffix()) {
				namespace = namespace.lookupChild(name);
				if (namespace == null) {
					return null;
				}
				origin.add(name);
			}
			return origin;
		}
		return null;
	} else if (sqlQuery instanceof SqlOrderBy) {
		return getFieldOrigin(((SqlOrderBy) sqlQuery).query, i);
	} else {
		return null;
	}
}
 
Example #27
Source File: SqlValidatorUtil.java    From calcite with Apache License 2.0
/** Analyzes a tuple in a GROUPING SETS clause.
 *
 * <p>For example, in {@code GROUP BY GROUPING SETS ((a, b), a, c)},
 * {@code (a, b)} is a tuple.
 *
 * <p>Gathers into {@code groupExprs} the set of distinct expressions being
 * grouped, and returns a bitmap indicating which expressions this tuple
 * is grouping. */
private static List<ImmutableBitSet> analyzeGroupTuple(SqlValidatorScope scope,
     GroupAnalyzer groupAnalyzer, List<SqlNode> operandList) {
  List<ImmutableBitSet> list = new ArrayList<>();
  for (SqlNode operand : operandList) {
    list.add(
        analyzeGroupExpr(scope, groupAnalyzer, operand));
  }
  return list;
}
 
Example #28
Source File: SqlValidatorImpl.java    From flink with Apache License 2.0
private void checkRollUpInOrderBy(SqlSelect select) {
	SqlNodeList orderList = select.getOrderList();
	if (orderList != null) {
		for (SqlNode node : orderList) {
			checkRollUp(null, select, node, getOrderScope(select), "ORDER BY");
		}
	}
}
 
Example #29
Source File: TestSQLCreateEmptyTable.java    From dremio-oss with Apache License 2.0
@Test
public void testCreateTableDecimalDatatype() {
  String sql = "CREATE TABLE newTbl(id DECIMAL(38, 2), name varchar) PARTITION BY (name)";
  SqlNode sqlNode = SqlConverter.parseSingleStatementImpl(sql, parserConfig, false);
  Assert.assertTrue(sqlNode.isA(Sets.immutableEnumSet(SqlKind.OTHER_DDL)));
  SqlCreateEmptyTable sqlCreateEmptyTable = (SqlCreateEmptyTable) sqlNode;
  Assert.assertArrayEquals(new String[]{"`id` DECIMAL(38, 2)", "`name` VARCHAR"},
    sqlCreateEmptyTable.getFieldNames().toArray(new String[0]));
  Assert.assertArrayEquals(new String[]{"name"}, sqlCreateEmptyTable.getPartitionColumns(null, null).toArray(new String[0]));
}
 
Example #30
Source File: SqlImplementor.java    From calcite with Apache License 2.0
/** Creates a result based on a join. (Each join could contain one or more
 * relational expressions.) */
public Result result(SqlNode join, Result leftResult, Result rightResult) {
  final ImmutableMap.Builder<String, RelDataType> builder =
      ImmutableMap.builder();
  if (join.getKind() == SqlKind.JOIN) {
    collectAliases(builder, join,
        Iterables.concat(leftResult.aliases.values(),
            rightResult.aliases.values()).iterator());
    return new Result(join, Expressions.list(Clause.FROM), null, null,
        builder.build());
  } else {
    return new Result(join, Expressions.list(Clause.FROM), null, null,
        leftResult.aliases);
  }
}