Java Code Examples for org.apache.calcite.sql.parser.SqlParser

The following examples show how to use org.apache.calcite.sql.parser.SqlParser. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: calcite   Source File: AbstractMaterializedViewTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Parses, validates and converts a SQL statement into a {@link RelNode} tree.
 * View expansion is deliberately unsupported in this test harness.
 */
private RelNode toRel(RelOptCluster cluster, SchemaPlus rootSchema,
    SchemaPlus defaultSchema, String sql) throws SqlParseException {
  // Parse with the default parser configuration.
  final SqlNode stmt = SqlParser.create(sql, SqlParser.Config.DEFAULT).parseStmt();

  // Catalog reader resolving names against the supplied schemas.
  final CalciteCatalogReader reader = new CalciteCatalogReader(
      CalciteSchema.from(rootSchema),
      CalciteSchema.from(defaultSchema).path(null),
      new JavaTypeFactoryImpl(), new CalciteConnectionConfigImpl(new Properties()));

  final SqlValidator sqlValidator = new ValidatorForTest(SqlStdOperatorTable.instance(),
      reader, new JavaTypeFactoryImpl(), SqlConformanceEnum.DEFAULT);
  final SqlNode checked = sqlValidator.validate(stmt);

  final SqlToRelConverter.Config converterConfig = SqlToRelConverter.configBuilder()
      .withTrimUnusedFields(true)
      .withExpand(true)
      .withDecorrelationEnabled(true)
      .build();
  final SqlToRelConverter sqlToRel = new SqlToRelConverter(
      (rowType, queryString, schemaPath, viewPath) -> {
        // This harness never expands views; fail loudly if asked to.
        throw new UnsupportedOperationException("cannot expand view");
      }, sqlValidator, reader, cluster, StandardConvertletTable.INSTANCE, converterConfig);
  return sqlToRel.convertQuery(checked, false, true).rel;
}
 
Example 2
Source Project: calcite   Source File: CalciteMetaImpl.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Adds the value of the given database property to {@code builder}.
 * Properties with no known value leave the builder unchanged.
 */
private ImmutableMap.Builder<DatabaseProperty, Object> addProperty(
    ImmutableMap.Builder<DatabaseProperty, Object> builder,
    DatabaseProperty p) {
  final Object value;
  switch (p) {
  case GET_S_Q_L_KEYWORDS:
    // The keyword list comes from the parser's own metadata.
    value = SqlParser.create("").getMetadata().getJdbcKeywords();
    break;
  case GET_NUMERIC_FUNCTIONS:
    value = SqlJdbcFunctionCall.getNumericFunctions();
    break;
  case GET_STRING_FUNCTIONS:
    value = SqlJdbcFunctionCall.getStringFunctions();
    break;
  case GET_SYSTEM_FUNCTIONS:
    value = SqlJdbcFunctionCall.getSystemFunctions();
    break;
  case GET_TIME_DATE_FUNCTIONS:
    value = SqlJdbcFunctionCall.getTimeDateFunctions();
    break;
  default:
    return builder;
  }
  return builder.put(p, value);
}
 
Example 3
Source Project: flink   Source File: PlannerContext.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates a catalog reader whose lookup path is the current catalog/database.
 *
 * @param lenientCaseSensitivity when true, forces case-insensitive lookup
 *                               regardless of the parser configuration
 */
private FlinkCalciteCatalogReader createCatalogReader(
		boolean lenientCaseSensitivity,
		String currentCatalog,
		String currentDatabase) {
	final SqlParser.Config parserConfig = getSqlParserConfig();
	// Lenient mode overrides whatever the parser config says.
	final boolean caseSensitive = !lenientCaseSensitivity && parserConfig.caseSensitive();

	final SqlParser.Config adjustedConfig = SqlParser.configBuilder(parserConfig)
			.setCaseSensitive(caseSensitive)
			.build();

	final SchemaPlus root = getRootSchema(this.rootSchema.plus());
	// Resolution path: [catalog, database] first, then [catalog] alone.
	return new FlinkCalciteCatalogReader(
			CalciteSchema.from(root),
			asList(
					asList(currentCatalog, currentDatabase),
					singletonList(currentCatalog)
			),
			typeFactory,
			CalciteConfig$.MODULE$.connectionConfig(adjustedConfig));
}
 
Example 4
Source Project: alchemy   Source File: SqlParseUtil.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Walks each SQL statement and collects the source tables, UDFs and sink
 * names it references. Only INSERT and SELECT statements are accepted.
 *
 * @throws IllegalArgumentException if a statement is neither INSERT nor SELECT
 */
public static void parse(List<String> sqls, List<String> sources, List<String> udfs, List<String> sinks)
    throws SqlParseException {
    for (String sql : sqls) {
        SqlNode node = SqlParser.create(sql, CONFIG).parseStmt();
        SqlKind kind = node.getKind();
        if (kind == SqlKind.INSERT) {
            SqlInsert insert = (SqlInsert) node;
            addSink(sinks, findSinkName(insert));
            // The INSERT body is a SELECT; mine it for sources and UDFs.
            parseSource((SqlSelect) insert.getSource(), sources, udfs);
        } else if (kind == SqlKind.SELECT) {
            parseSource((SqlSelect) node, sources, udfs);
        } else {
            throw new IllegalArgumentException("It must be an insert SQL, sql:" + sql);
        }
    }
}
 
Example 5
Source Project: calcite   Source File: SortRemoveRuleTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Applies {@code prepareRules} and then {@link SortRemoveRule} to the plan
 * for {@code sql}. The default schema that is used in these tests provides
 * tables sorted on the primary key, so scan operators always come with a
 * {@link org.apache.calcite.rel.RelCollation} trait.
 */
private RelNode transform(String sql, RuleSet prepareRules) throws Exception {
  final SchemaPlus root = Frameworks.createRootSchema(true);
  final FrameworkConfig frameworkConfig = Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.Config.DEFAULT)
      .defaultSchema(root.add("hr", new HrClusteredSchema()))
      .traitDefs(ConventionTraitDef.INSTANCE, RelCollationTraitDef.INSTANCE)
      .programs(
          Programs.of(prepareRules),
          Programs.ofRules(SortRemoveRule.INSTANCE))
      .build();
  final Planner planner = Frameworks.getPlanner(frameworkConfig);
  final SqlNode validated = planner.validate(planner.parse(sql));
  final RelNode logicalPlan = planner.rel(validated).rel;
  final RelTraitSet wanted = logicalPlan.getTraitSet()
      .replace(EnumerableConvention.INSTANCE);
  // Program 0 runs the prepare rules; program 1 runs SortRemoveRule.
  final RelNode prepared = planner.transform(0, wanted, logicalPlan);
  return planner.transform(1, wanted, prepared);
}
 
Example 6
Source Project: kylin-on-parquet-v2   Source File: SqlConverter.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Converts a SQL string from the JDBC source dialect into a form Calcite can
 * parse: backticks become double quotes, the "default" schema prefix is
 * normalised, and an optional AST-level conversion is applied best-effort.
 */
public String convertSql(String orig) {
    // for jdbc source, convert quote from backtick to double quote
    String result = orig.replace("`", "\"");

    if (!configurer.skipHandleDefault()) {
        // use Calcite dialect to cater to SqlParser
        String escapedDefault = SqlDialect.CALCITE
                .quoteIdentifier(configurer.useUppercaseDefault() ? "DEFAULT" : "default");
        result = result.replaceAll("(?i)default\\.", escapedDefault + ".");
        result = result.replaceAll("\"(?i)default\"\\.", escapedDefault + ".");
    }

    if (!configurer.skipDefaultConvert()) {
        try {
            SqlNode node = SqlParser.create(result).parseQuery().accept(sqlNodeConverter);
            result = sqlWriter.format(node);
        } catch (Throwable e) {
            // Conversion is best-effort; keep the pre-conversion text on failure.
            logger.error("Failed to default convert sql, will use the input: {}", orig, e);
        } finally {
            sqlWriter.reset();
        }
    }
    return configurer.fixAfterDefaultConvert(result);
}
 
Example 7
Source Project: dremio-oss   Source File: SqlReservedKeywordGenerator.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Writes the list of reserved SQL keywords, as reported by the parser
 * metadata, to a file inside the directory given as the single argument.
 */
public static void main(String[] args) throws Exception {
  if (args.length != 1) {
    throw new IllegalArgumentException("Usage: java {cp} " + SqlReservedKeywordGenerator.class.getName() +
        " path/where/to/write/the/file");
  }

  final File outputFile = new File(args[0], RESERVED_KEYWORD_FILE_NAME);
  System.out.println("Writing reserved SQL keywords to file: " + outputFile.getAbsolutePath());

  try (PrintWriter writer = new PrintWriter(new OutputStreamWriter(new FileOutputStream(outputFile), UTF_8))) {
    writer.printf("# AUTO-GENERATED LIST OF SQL RESERVED KEYWORDS (generated by %s)",
        SqlReservedKeywordGenerator.class.getName());
    writer.println();

    // A token qualifies only when the parser marks it both keyword and reserved.
    final SqlAbstractParserImpl.Metadata parserMetadata =
        SqlParser.create("", new ParserConfig(Quoting.DOUBLE_QUOTE, 256)).getMetadata();
    for (String token : parserMetadata.getTokens()) {
      if (parserMetadata.isKeyword(token) && parserMetadata.isReservedWord(token)) {
        writer.println(token);
      }
    }
  }
}
 
Example 8
Source Project: calcite   Source File: FrameworksTest.java    License: Apache License 2.0 6 votes vote down vote up
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-2039">[CALCITE-2039]
 * AssertionError when pushing project to ProjectableFilterableTable</a>
 * using UPDATE via {@link Frameworks}. */
@Test void testUpdate() throws Exception {
  final Table mockTable = new TableImpl();
  final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
  final SchemaPlus subSchema = rootSchema.add("x", new AbstractSchema());
  subSchema.add("MYTABLE", mockTable);

  final List<RelTraitDef> traitDefs = new ArrayList<>();
  traitDefs.add(ConventionTraitDef.INSTANCE);
  traitDefs.add(RelDistributionTraitDef.INSTANCE);

  // Case-insensitive parsing so lower-case "set"/"where" in the query match.
  final SqlParser.Config parserConfig =
      SqlParser.configBuilder(SqlParser.Config.DEFAULT)
          .setCaseSensitive(false)
          .build();

  // define the rules you want to apply
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .parserConfig(parserConfig)
      .defaultSchema(subSchema)
      .traitDefs(traitDefs)
      .ruleSets(
          RuleSets.ofList(AbstractConverter.ExpandConversionRule.INSTANCE))
      .programs(Programs.ofRules(Programs.RULE_SET))
      .build();
  executeQuery(config, " UPDATE MYTABLE set id=7 where id=1",
      CalciteSystemProperty.DEBUG.value());
}
 
Example 9
Source Project: calcite   Source File: PlannerTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Plans {@code sql} with the sort-removal rule set and, when a collation
 * trait is available, asserts that the final plan equals {@code plan}.
 */
private void runDuplicateSortCheck(String sql, String plan) throws Exception {
  final RuleSet rules = RuleSets.ofList(
      SortRemoveRule.INSTANCE,
      EnumerableRules.ENUMERABLE_TABLE_SCAN_RULE,
      EnumerableRules.ENUMERABLE_PROJECT_RULE,
      EnumerableRules.ENUMERABLE_WINDOW_RULE,
      EnumerableRules.ENUMERABLE_SORT_RULE,
      ProjectToWindowRule.PROJECT);
  final Planner planner = getPlanner(null,
      SqlParser.configBuilder().setLex(Lex.JAVA).build(),
      Programs.of(rules));
  final SqlNode validated = planner.validate(planner.parse(sql));
  final RelNode logical = planner.rel(validated).rel;
  final RelTraitSet enumerableTraits = logical.getTraitSet()
      .replace(EnumerableConvention.INSTANCE);
  if (enumerableTraits.getTrait(RelCollationTraitDef.INSTANCE) == null) {
    // SortRemoveRule can only work if collation trait is enabled.
    return;
  }
  final RelNode optimized = planner.transform(0, enumerableTraits, logical);
  assertThat(toString(optimized), equalTo(plan));
}
 
Example 10
Source Project: quark   Source File: QuarkMetaImpl.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Adds the value for the given database property to {@code builder};
 * unknown properties leave the builder untouched.
 */
private ImmutableMap.Builder<DatabaseProperty, Object> addProperty(
    ImmutableMap.Builder<DatabaseProperty, Object> builder,
    DatabaseProperty p) {
  final Object propertyValue;
  switch (p) {
    case GET_S_Q_L_KEYWORDS:
      // JDBC keyword list is derived from an empty parser's metadata.
      propertyValue = SqlParser.create("").getMetadata().getJdbcKeywords();
      break;
    case GET_NUMERIC_FUNCTIONS:
      propertyValue = SqlJdbcFunctionCall.getNumericFunctions();
      break;
    case GET_STRING_FUNCTIONS:
      propertyValue = SqlJdbcFunctionCall.getStringFunctions();
      break;
    case GET_SYSTEM_FUNCTIONS:
      propertyValue = SqlJdbcFunctionCall.getSystemFunctions();
      break;
    case GET_TIME_DATE_FUNCTIONS:
      propertyValue = SqlJdbcFunctionCall.getTimeDateFunctions();
      break;
    default:
      return builder;
  }
  return builder.put(p, propertyValue);
}
 
Example 11
Source Project: quark   Source File: ParserFactory.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Parses the statement and routes it to the appropriate parser: DDL
 * statements go to a {@link DDLParser}, everything else to the SQL query
 * parser configured from {@code info}.
 *
 * @throws SQLException if the query parser cannot be created
 * @throws RuntimeException if {@code sql} cannot be parsed (wraps the
 *     underlying {@link SqlParseException})
 */
public Parser getParser(String sql, Properties info)
    throws SQLException {
  SqlParser parser = SqlParser.create(sql,
      SqlParser.configBuilder()
          .setQuotedCasing(Casing.UNCHANGED)
          .setUnquotedCasing(Casing.UNCHANGED)
          .setQuoting(Quoting.DOUBLE_QUOTE)
          .setParserFactory(QuarkParserImpl.FACTORY)
          .build());
  SqlNode sqlNode;
  try {
    sqlNode = parser.parseStmt();
  } catch (SqlParseException e) {
    throw new RuntimeException(
        "parse failed: " + e.getMessage(), e);
  }
  // Enum constants are singletons: compare with == instead of equals().
  if (sqlNode.getKind() == SqlKind.OTHER_DDL) {
    return new DDLParser();
  } else  {
    return getSqlQueryParser(info);
  }
}
 
Example 12
Source Project: calcite   Source File: PlannerTest.java    License: Apache License 2.0 6 votes vote down vote up
/** Checks that a query returns a particular plan, using a planner with
 * MultiJoinOptimizeBushyRule enabled. */
private void checkBushy(String sql, String expected) throws Exception {
  final SchemaPlus root = Frameworks.createRootSchema(true);
  final SchemaPlus foodmart =
      CalciteAssert.addSchema(root, CalciteAssert.SchemaSpec.CLONE_FOODMART);
  final FrameworkConfig frameworkConfig = Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.Config.DEFAULT)
      .defaultSchema(foodmart)
      .traitDefs((List<RelTraitDef>) null)
      // Heuristic join ordering with bushy join trees enabled.
      .programs(Programs.heuristicJoinOrder(Programs.RULE_SET, true, 2))
      .build();
  final Planner planner = Frameworks.getPlanner(frameworkConfig);
  final SqlNode validated = planner.validate(planner.parse(sql));
  final RelNode logical = planner.rel(validated).project();
  final RelTraitSet traits = logical.getTraitSet()
      .replace(EnumerableConvention.INSTANCE);
  final RelNode optimized = planner.transform(0, traits, logical);
  assertThat(toString(optimized), containsString(expected));
}
 
Example 13
Source Project: flink   Source File: PlannerContext.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Returns the SQL parser config for this environment including a custom Calcite configuration.
 *
 * <p>A parser config supplied through the table's {@code CalciteConfig} takes
 * precedence; otherwise a default config is built lazily using the Flink
 * parser factory and the current SQL conformance.
 */
private SqlParser.Config getSqlParserConfig() {
	return JavaScalaConversionUtil.<SqlParser.Config>toJava(getCalciteConfig(tableConfig).getSqlParserConfig()).orElseGet(
			// we use Java lex because back ticks are easier than double quotes in programming
			// and cases are preserved
			() -> {
				SqlConformance conformance = getSqlConformance();
				return SqlParser
						.configBuilder()
						.setParserFactory(FlinkSqlParserFactories.create(conformance))
						.setConformance(conformance)
						.setLex(Lex.JAVA)
						// NOTE(review): 256 caps identifier length — presumably chosen to
						// match Calcite's default limits; confirm before changing.
						.setIdentifierMaxLength(256)
						.build();
			}
	);
}
 
Example 14
Source Project: calcite   Source File: PlannerImpl.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Parses a SQL statement from the reader and moves the planner to the
 * PARSED state. A closed or reset planner is implicitly made ready first.
 */
public SqlNode parse(final Reader reader) throws SqlParseException {
  // A closed or reset planner must be re-initialised before parsing.
  if (state == State.STATE_0_CLOSED || state == State.STATE_1_RESET) {
    ready();
  }
  ensure(State.STATE_2_READY);
  final SqlNode sqlNode =
      SqlParser.create(reader, parserConfig).parseStmt();
  state = State.STATE_3_PARSED;
  return sqlNode;
}
 
Example 15
Source Project: calcite   Source File: TpcdsLatticeSuggesterTest.java    License: Apache License 2.0 5 votes vote down vote up
/** Creates a Frameworks config builder whose default schema is built from
 * the given schema spec. */
static Frameworks.ConfigBuilder config(CalciteAssert.SchemaSpec spec) {
  final SchemaPlus root = Frameworks.createRootSchema(true);
  return Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.Config.DEFAULT)
      .defaultSchema(CalciteAssert.addSchema(root, spec));
}
 
Example 16
Source Project: calcite   Source File: Frameworks.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a StdFrameworkConfig, capturing every planner facet supplied by
 * the builder. All fields are assigned verbatim; no validation or
 * defaulting happens here — the builder is responsible for supplying
 * usable values.
 */
StdFrameworkConfig(Context context,
    SqlRexConvertletTable convertletTable,
    SqlOperatorTable operatorTable,
    ImmutableList<Program> programs,
    ImmutableList<RelTraitDef> traitDefs,
    SqlParser.Config parserConfig,
    SqlValidator.Config sqlValidatorConfig,
    SqlToRelConverter.Config sqlToRelConverterConfig,
    SchemaPlus defaultSchema,
    RelOptCostFactory costFactory,
    RelDataTypeSystem typeSystem,
    RexExecutor executor,
    boolean evolveLattice,
    SqlStatisticProvider statisticProvider,
    RelOptTable.ViewExpander viewExpander) {
  this.context = context;
  this.convertletTable = convertletTable;
  this.operatorTable = operatorTable;
  this.programs = programs;
  this.traitDefs = traitDefs;
  this.parserConfig = parserConfig;
  this.sqlValidatorConfig = sqlValidatorConfig;
  this.sqlToRelConverterConfig = sqlToRelConverterConfig;
  this.defaultSchema = defaultSchema;
  this.costFactory = costFactory;
  this.typeSystem = typeSystem;
  this.executor = executor;
  this.evolveLattice = evolveLattice;
  this.statisticProvider = statisticProvider;
  this.viewExpander = viewExpander;
}
 
Example 17
Source Project: calcite   Source File: PlannerTest.java    License: Apache License 2.0 5 votes vote down vote up
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-569">[CALCITE-569]
 * ArrayIndexOutOfBoundsException when deducing collation</a>. */
@Test void testOrderByNonSelectColumn() throws Exception {
  final SchemaPlus tpch = Frameworks.createRootSchema(true)
      .add("tpch", new ReflectiveSchema(new TpchSchema()));

  final String query = "select t.psPartkey from\n"
      + "(select ps.psPartkey from `tpch`.`partsupp` ps\n"
      + "order by ps.psPartkey, ps.psSupplyCost) t\n"
      + "order by t.psPartkey";

  final List<RelTraitDef> traitDefs = new ArrayList<>();
  traitDefs.add(ConventionTraitDef.INSTANCE);
  traitDefs.add(RelCollationTraitDef.INSTANCE);
  // MySQL lex so the back-ticked identifiers in the query parse.
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.configBuilder().setLex(Lex.MYSQL).build())
      .defaultSchema(tpch)
      .traitDefs(traitDefs)
      .programs(Programs.ofRules(Programs.RULE_SET))
      .build();
  final String plan;
  try (Planner p = Frameworks.getPlanner(config)) {
    SqlNode validated = p.validate(p.parse(query));
    plan = Util.toLinux(RelOptUtil.toString(p.rel(validated).project()));
  }
  assertThat(plan,
      equalTo("LogicalSort(sort0=[$0], dir0=[ASC])\n"
      + "  LogicalProject(psPartkey=[$0])\n"
      + "    LogicalTableScan(table=[[tpch, partsupp]])\n"));
}
 
Example 18
Source Project: calcite   Source File: PlannerTest.java    License: Apache License 2.0 5 votes vote down vote up
/** Builds a planner over the HR schema with the given trait defs, parser
 * config and programs. {@code traitDefs} may be null. */
private Planner getPlanner(List<RelTraitDef> traitDefs,
                           SqlParser.Config parserConfig,
                           Program... programs) {
  final SchemaPlus root = Frameworks.createRootSchema(true);
  final SchemaPlus hrSchema =
      CalciteAssert.addSchema(root, CalciteAssert.SchemaSpec.HR);
  return Frameworks.getPlanner(
      Frameworks.newConfigBuilder()
          .parserConfig(parserConfig)
          .defaultSchema(hrSchema)
          .traitDefs(traitDefs)
          .programs(programs)
          .build());
}
 
Example 19
Source Project: Bats   Source File: ServerMetaProvider.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds the server-metadata response and sends it over the RPC channel.
 * Every JDBC length limit is capped by the parser's identifier max length,
 * and keyword metadata comes from an empty-input SqlParser. Any failure is
 * reported back as a FAILED status instead of being propagated; the
 * response is always sent from the finally block.
 */
@Override
public void run() {
  final GetServerMetaResp.Builder respBuilder = GetServerMetaResp.newBuilder();
  try {
    final ServerMeta.Builder metaBuilder = ServerMeta.newBuilder(DEFAULT);
    PlannerSettings plannerSettings = new PlannerSettings(session.getOptions(), context.getFunctionImplementationRegistry());

    DrillParserConfig config = new DrillParserConfig(plannerSettings);

    // One shared cap for all name-length limits below.
    int identifierMaxLength = config.identifierMaxLength();
    // An empty-input parser is sufficient to obtain keyword metadata.
    Metadata metadata = SqlParser.create("", config).getMetadata();
    metaBuilder
      .setMaxCatalogNameLength(identifierMaxLength)
      .setMaxColumnNameLength(identifierMaxLength)
      .setMaxCursorNameLength(identifierMaxLength)
      .setMaxSchemaNameLength(identifierMaxLength)
      .setMaxTableNameLength(identifierMaxLength)
      .setMaxUserNameLength(identifierMaxLength)
      .setIdentifierQuoteString(config.quoting().string)
      .setIdentifierCasing(getIdentifierCasing(config.unquotedCasing(), config.caseSensitive()))
      .setQuotedIdentifierCasing(getIdentifierCasing(config.quotedCasing(), config.caseSensitive()))
      .addAllSqlKeywords(Splitter.on(",").split(metadata.getJdbcKeywords()))
      .setCurrentSchema(session.getDefaultSchemaPath());
    respBuilder.setServerMeta(metaBuilder);
    respBuilder.setStatus(RequestStatus.OK);
  } catch(Throwable t) {
    // Report any failure (including Errors) to the client rather than dying.
    respBuilder.setStatus(RequestStatus.FAILED);
    respBuilder.setError(MetadataProvider.createPBError("server meta", t));
  } finally {
    // The response must go out in every case — success or failure.
    responseSender.send(new Response(RpcType.SERVER_META, respBuilder.build()));
  }
}
 
Example 20
Source Project: calcite   Source File: RexSqlStandardConvertletTableTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Converts {@code sql} to a relational expression over an empty root schema,
 * toggling RexBuilder simplification through the REL_BUILDER_SIMPLIFY hook.
 */
private RelNode convertSqlToRel(String sql, boolean simplifyRex) {
  final FrameworkConfig config = Frameworks.newConfigBuilder()
          .defaultSchema(CalciteSchema.createRootSchema(false).plus())
          .parserConfig(SqlParser.configBuilder().build())
          .build();
  final Planner planner = Frameworks.getPlanner(config);
  try (Closer closer = new Closer()) {
    // Scope the simplification property to this conversion only.
    closer.add(Hook.REL_BUILDER_SIMPLIFY.addThread(Hook.propertyJ(simplifyRex)));
    return planner.rel(planner.validate(planner.parse(sql))).rel;
  } catch (SqlParseException | RelConversionException | ValidationException e) {
    throw TestUtil.rethrow(e);
  }
}
 
Example 21
Source Project: calcite   Source File: SqlPrettyWriterTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Parses a SQL query. To use a different parser, override this method.
 *
 * @throws AssertionError if the SQL cannot be parsed; the underlying
 *     {@link SqlParseException} is attached as the cause
 */
protected SqlNode parseQuery(String sql) {
  SqlNode node;
  try {
    node = SqlParser.create(sql).parseQuery();
  } catch (SqlParseException e) {
    String message = "Received error while parsing SQL '" + sql + "'"
        + "; error is:\n"
        + e.toString();
    // Chain the parse exception so the full stack trace is not lost.
    throw new AssertionError(message, e);
  }
  return node;
}
 
Example 22
Source Project: flink   Source File: FlinkSqlParserImplTest.java    License: Apache License 2.0 5 votes vote down vote up
/** Creates the parser, overriding the default SQL conformance when one has
 * been set on this test. */
protected SqlParser getSqlParser(Reader source) {
	if (conformance0 == null) {
		return super.getSqlParser(source);
	}
	// overwrite the default sql conformance.
	final SqlParser.Config config = SqlParser.configBuilder()
			.setParserFactory(parserImplFactory())
			.setQuoting(Quoting.DOUBLE_QUOTE)
			.setUnquotedCasing(Casing.TO_UPPER)
			.setQuotedCasing(Casing.UNCHANGED)
			.setConformance(conformance0)
			.build();
	return SqlParser.create(source, config);
}
 
Example 23
Source Project: flink   Source File: FlinkDDLDataTypeTest.java    License: Apache License 2.0 5 votes vote down vote up
/** Creates a parser for {@code sql} configured with the Flink DDL parser
 * factory and back-tick quoting. */
private SqlParser getSqlParser(String sql) {
	final SqlParser.Config config = SqlParser.configBuilder()
			.setParserFactory(FlinkSqlParserImpl.FACTORY)
			.setQuoting(Quoting.BACK_TICK)
			.setUnquotedCasing(Casing.UNCHANGED)
			.setQuotedCasing(Casing.UNCHANGED)
			.setConformance(conformance)
			.build();
	return SqlParser.create(sql, config);
}
 
Example 24
Source Project: alchemy   Source File: MysqlSideFunction.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Rewrites the side table's SELECT statement by AND-ing the join-condition
 * predicates into its WHERE clause, and returns the modified SQL text.
 *
 * @throws SqlParseException if the side table SQL cannot be parsed
 * @throws UnsupportedOperationException if the SQL is not a SELECT
 */
private String modifySql(SideTable sideTable) throws SqlParseException {
    SqlParser.Config config = SqlParser.configBuilder().setLex(Lex.MYSQL).build();
    SqlParser sqlParser = SqlParser.create(sideTable.getSql(), config);
    SqlNode sqlNode = sqlParser.parseStmt();
    if (SqlKind.SELECT != sqlNode.getKind()) {
        throw new UnsupportedOperationException(
            "MysqlAsyncReqRow only support query sql, sql:" + sideTable.getSql());
    }
    SqlSelect sqlSelect = (SqlSelect)sqlNode;
    SqlNode whereNode = sqlSelect.getWhere();
    // Hand-built AND operator (precedence 24) used to splice predicates together.
    SqlBinaryOperator and = new SqlBinaryOperator("AND", SqlKind.AND, 24, true,
        ReturnTypes.BOOLEAN_NULLABLE_OPTIMIZED, InferTypes.BOOLEAN, OperandTypes.BOOLEAN_BOOLEAN);
    List<SqlBasicCall> conditionNodes = createConditionNodes(sideTable.getConditions(), sideTable.getSideAlias());
    List<SqlNode> nodes = new ArrayList<>();
    nodes.addAll(conditionNodes);
    if (whereNode != null) {
        // Keep the original WHERE clause as one more AND operand.
        nodes.add(whereNode);
    } else {
        // No existing WHERE: add a synthetic equality from SideParser so the
        // AND call below has a second operand — presumably a tautology;
        // TODO confirm against SideParser.createEqualNodes.
        SqlBinaryOperator equal = new SqlBinaryOperator("=", SqlKind.EQUALS, 30, true, ReturnTypes.BOOLEAN_NULLABLE,
            InferTypes.FIRST_KNOWN, OperandTypes.COMPARABLE_UNORDERED_COMPARABLE_UNORDERED);
        SqlBasicCall andEqual
            = new SqlBasicCall(equal, SideParser.createEqualNodes(SqlKind.AND), new SqlParserPos(0, 0));
        nodes.add(andEqual);
    }
    SqlBasicCall sqlBasicCall
        = new SqlBasicCall(and, nodes.toArray(new SqlNode[nodes.size()]), new SqlParserPos(0, 0));
    sqlSelect.setWhere(sqlBasicCall);
    return sqlSelect.toString();
}
 
Example 25
Source Project: alchemy   Source File: SqlParseUtil.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Extracts the sink table name from an INSERT statement.
 *
 * @throws IllegalArgumentException if {@code sql} is not an INSERT
 */
public static String parseSinkName(String sql) throws SqlParseException {
    SqlNode node = SqlParser.create(sql, CONFIG).parseStmt();
    if (node.getKind() != SqlKind.INSERT) {
        throw new IllegalArgumentException("It must be an insert SQL, sql:" + sql);
    }
    return findSinkName((SqlInsert) node);
}
 
Example 26
Source Project: calcite   Source File: SqlToRelTestBase.java    License: Apache License 2.0 5 votes vote down vote up
/** Expands a view definition by parsing, validating and converting its SQL
 * text into a relational tree. */
@Override public RelRoot expandView(RelDataType rowType, String queryString,
    List<String> schemaPath, List<String> viewPath) {
  try {
    final SqlNode validatedNode =
        validator.validate(SqlParser.create(queryString).parseStmt());
    return new SqlToRelConverter(
        this, validator, catalogReader, cluster,
        StandardConvertletTable.INSTANCE, config)
        .convertQuery(validatedNode, false, true);
  } catch (SqlParseException e) {
    throw new RuntimeException("Error happened while expanding view.", e);
  }
}
 
Example 27
Source Project: marble   Source File: TableEnv.java    License: Apache License 2.0 5 votes vote down vote up
/** Builds a connection config whose case sensitivity mirrors the parser
 * config's setting. */
private CalciteConnectionConfig createDefaultConnectionConfig(
    SqlParser.Config sqlParserConfig) {
  final Properties props = new Properties();
  props.setProperty(CalciteConnectionProperty.CASE_SENSITIVE.camelName(),
      Boolean.toString(sqlParserConfig.caseSensitive()));
  return new CalciteConnectionConfigImpl(props);
}
 
Example 28
Source Project: kylin-on-parquet-v2   Source File: SqlConverter.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Strips {@code originQuote} from the column expression and runs it through
 * the AST-level converter; on any failure the stripped input is returned.
 */
public String convertColumn(String column, String originQuote) {
    String result = column.replace(originQuote, "");
    try {
        SqlNode expr = SqlParser.create(result).parseExpression().accept(sqlNodeConverter);
        result = sqlWriter.format(expr);
    } catch (Throwable e) {
        // Best-effort conversion: log and fall back to the stripped input.
        logger.error("Failed to default convert Column, will use the input: {}", column, e);
    } finally {
        sqlWriter.reset();
    }
    return result;
}
 
Example 29
Source Project: incubator-pinot   Source File: CalciteSqlParser.java    License: Apache License 2.0 5 votes vote down vote up
/** Creates a parser for {@code sql} using the Pinot lexer and the liberal
 * Babel parser/conformance. */
private static SqlParser getSqlParser(String sql) {
  // TODO: Check if this can be converted to static or thread local.
  // BABEL is a very liberal conformance value that allows anything supported by any dialect
  SqlParser.Config parserConfig = SqlParser.configBuilder()
      .setLex(PINOT_LEX)
      .setConformance(SqlConformanceEnum.BABEL)
      .setParserFactory(SqlBabelParserImpl.FACTORY)
      .build();
  return SqlParser.create(sql, parserConfig);
}
 
Example 30
Source Project: kylin-on-parquet-v2   Source File: SqlParamsFinderTest.java    License: Apache License 2.0 5 votes vote down vote up
/** Verifies that SqlParamsFinder maps the window call's pattern parameters
 * ($0, $1) onto the concrete operand and OVER clause. */
@Test
public void testWindowCallParams() throws SqlParseException {
    SqlNode pattern = SqlParser.create("STDDEV_POP($0) OVER($1)").parseExpression();
    SqlNode call = SqlParser.create("STDDEV_POP(C1) OVER (ORDER BY C1)").parseExpression();
    SqlParamsFinder finder = SqlParamsFinder.newInstance((SqlCall) pattern, (SqlCall) call, true);
    Map<Integer, SqlNode> params = finder.getParamNodes();

    Assert.assertEquals("C1", params.get(0).toString());
    Assert.assertEquals("(ORDER BY `C1`)", params.get(1).toString());
    Assert.assertTrue(params.get(1) instanceof SqlWindow);
}