org.apache.calcite.sql.parser.SqlParser Java Examples

The following examples show how to use org.apache.calcite.sql.parser.SqlParser. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: PlannerContext.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Returns the SQL parser config for this environment including a custom Calcite configuration.
 *
 * <p>Falls back to a Flink default when the user-provided Calcite config
 * does not define a parser config.
 */
private SqlParser.Config getSqlParserConfig() {
	return JavaScalaConversionUtil.<SqlParser.Config>toJava(getCalciteConfig(tableConfig).getSqlParserConfig()).orElseGet(
			() -> {
				// Java lex is used because back ticks are easier than double
				// quotes in programming and identifier casing is preserved.
				SqlConformance conformance = getSqlConformance();
				return SqlParser.configBuilder()
						.setParserFactory(FlinkSqlParserFactories.create(conformance))
						.setConformance(conformance)
						.setLex(Lex.JAVA)
						.setIdentifierMaxLength(256)
						.build();
			}
	);
}
 
Example #2
Source File: PlannerTest.java    From calcite with Apache License 2.0 6 votes vote down vote up
/** Checks that a query returns a particular plan, using a planner with
 * MultiJoinOptimizeBushyRule enabled. */
private void checkBushy(String sql, String expected) throws Exception {
  final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
  final SchemaPlus foodmart =
      CalciteAssert.addSchema(rootSchema, CalciteAssert.SchemaSpec.CLONE_FOODMART);
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.Config.DEFAULT)
      .defaultSchema(foodmart)
      .traitDefs((List<RelTraitDef>) null)
      .programs(Programs.heuristicJoinOrder(Programs.RULE_SET, true, 2))
      .build();
  final Planner planner = Frameworks.getPlanner(config);
  // Parse, validate, and convert to a relational expression.
  final SqlNode validated = planner.validate(planner.parse(sql));
  final RelNode rel = planner.rel(validated).project();
  // Ask for an enumerable plan and run the single optimization program.
  final RelTraitSet traits = rel.getTraitSet().replace(EnumerableConvention.INSTANCE);
  final RelNode optimized = planner.transform(0, traits, rel);
  assertThat(toString(optimized), containsString(expected));
}
 
Example #3
Source File: SqlParseUtil.java    From alchemy with Apache License 2.0 6 votes vote down vote up
/**
 * Walks each SQL statement and collects the sources, UDFs and sinks it
 * references. Only INSERT and SELECT statements are accepted.
 */
public static void parse(List<String> sqls, List<String> sources, List<String> udfs, List<String> sinks)
    throws SqlParseException {
    for (String sql : sqls) {
        final SqlNode sqlNode = SqlParser.create(sql, CONFIG).parseStmt();
        final SqlKind kind = sqlNode.getKind();
        if (kind == SqlKind.INSERT) {
            // INSERT contributes a sink plus whatever its SELECT reads.
            final SqlInsert sqlInsert = (SqlInsert) sqlNode;
            addSink(sinks, findSinkName(sqlInsert));
            parseSource((SqlSelect) sqlInsert.getSource(), sources, udfs);
        } else if (kind == SqlKind.SELECT) {
            parseSource((SqlSelect) sqlNode, sources, udfs);
        } else {
            throw new IllegalArgumentException("It must be an insert SQL, sql:" + sql);
        }
    }
}
 
Example #4
Source File: SqlConverter.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
/**
 * Converts a SQL statement from the JDBC-source dialect into a form Calcite
 * can parse: backticks become double quotes, references to the "default"
 * schema are normalized, and (optionally) the statement is round-tripped
 * through the Calcite parser/writer.
 *
 * @param orig the original SQL text
 * @return the converted SQL; falls back to the quote-converted input when
 *         the Calcite round-trip fails
 */
public String convertSql(String orig) {
    // for jdbc source, convert quote from backtick to double quote.
    // Plain replace(): neither argument contains regex metacharacters, so
    // there is no need to pay for replaceAll's regex compilation.
    String converted = orig.replace("`", "\"");

    if (!configurer.skipHandleDefault()) {
        String escapedDefault = SqlDialect.CALCITE
                .quoteIdentifier(configurer.useUppercaseDefault() ? "DEFAULT" : "default");
        // use Calcite dialect to cater to SqlParser; (?i) makes the match case-insensitive
        converted = converted.replaceAll("(?i)default\\.", escapedDefault + ".");
        converted = converted.replaceAll("\"(?i)default\"\\.", escapedDefault + ".");
    }

    if (!configurer.skipDefaultConvert()) {
        try {
            SqlNode sqlNode = SqlParser.create(converted).parseQuery();
            sqlNode = sqlNode.accept(sqlNodeConverter);
            converted = sqlWriter.format(sqlNode);
        } catch (Throwable e) {
            // Best effort: keep the quote-converted SQL when the round-trip fails.
            logger.error("Failed to default convert sql, will use the input: {}", orig, e);
        } finally {
            sqlWriter.reset();
        }
    }
    converted = configurer.fixAfterDefaultConvert(converted);
    return converted;
}
 
Example #5
Source File: SortRemoveRuleTest.java    From calcite with Apache License 2.0 6 votes vote down vote up
/**
 * The default schema that is used in these tests provides tables sorted on the primary key. Due
 * to this scan operators always come with a {@link org.apache.calcite.rel.RelCollation} trait.
 */
private RelNode transform(String sql, RuleSet prepareRules) throws Exception {
  final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
  final SchemaPlus defSchema = rootSchema.add("hr", new HrClusteredSchema());
  // Two optimization phases: the caller-supplied rules, then sort removal.
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.Config.DEFAULT)
      .defaultSchema(defSchema)
      .traitDefs(ConventionTraitDef.INSTANCE, RelCollationTraitDef.INSTANCE)
      .programs(
          Programs.of(prepareRules),
          Programs.ofRules(SortRemoveRule.INSTANCE))
      .build();
  final Planner planner = Frameworks.getPlanner(config);
  final SqlNode validated = planner.validate(planner.parse(sql));
  final RelNode logicalPlan = planner.rel(validated).rel;
  final RelTraitSet desiredTraits =
      logicalPlan.getTraitSet().replace(EnumerableConvention.INSTANCE);
  // Program 0 applies prepareRules; program 1 applies SortRemoveRule.
  final RelNode prepared = planner.transform(0, desiredTraits, logicalPlan);
  return planner.transform(1, desiredTraits, prepared);
}
 
Example #6
Source File: QuarkMetaImpl.java    From quark with Apache License 2.0 6 votes vote down vote up
/**
 * Adds the value for database property {@code p} to {@code builder};
 * properties this server does not handle are skipped unchanged.
 */
private ImmutableMap.Builder<DatabaseProperty, Object> addProperty(
    ImmutableMap.Builder<DatabaseProperty, Object> builder,
    DatabaseProperty p) {
  final Object value;
  switch (p) {
    case GET_S_Q_L_KEYWORDS:
      // Keyword list is derived from the parser's own metadata.
      value = SqlParser.create("").getMetadata().getJdbcKeywords();
      break;
    case GET_NUMERIC_FUNCTIONS:
      value = SqlJdbcFunctionCall.getNumericFunctions();
      break;
    case GET_STRING_FUNCTIONS:
      value = SqlJdbcFunctionCall.getStringFunctions();
      break;
    case GET_SYSTEM_FUNCTIONS:
      value = SqlJdbcFunctionCall.getSystemFunctions();
      break;
    case GET_TIME_DATE_FUNCTIONS:
      value = SqlJdbcFunctionCall.getTimeDateFunctions();
      break;
    default:
      return builder;
  }
  return builder.put(p, value);
}
 
Example #7
Source File: ParserFactory.java    From quark with Apache License 2.0 6 votes vote down vote up
/**
 * Parses {@code sql} with Quark's parser and returns the {@link Parser}
 * implementation matching the statement kind: a {@link DDLParser} for DDL
 * statements, otherwise the regular SQL query parser.
 *
 * @param sql  the SQL statement to inspect
 * @param info connection properties used to build the query parser
 * @throws SQLException if the statement cannot be parsed or the query
 *                      parser cannot be created
 */
public Parser getParser(String sql, Properties info)
    throws SQLException {
  SqlParser parser = SqlParser.create(sql,
      SqlParser.configBuilder()
          .setQuotedCasing(Casing.UNCHANGED)
          .setUnquotedCasing(Casing.UNCHANGED)
          .setQuoting(Quoting.DOUBLE_QUOTE)
          .setParserFactory(QuarkParserImpl.FACTORY)
          .build());
  SqlNode sqlNode;
  try {
    sqlNode = parser.parseStmt();
  } catch (SqlParseException e) {
    // Surface parse failures through the declared SQLException instead of an
    // undeclared RuntimeException, preserving the original cause.
    throw new SQLException("parse failed: " + e.getMessage(), e);
  }
  // SqlKind is an enum, so identity comparison is sufficient and null-safe.
  if (sqlNode.getKind() == SqlKind.OTHER_DDL) {
    return new DDLParser();
  } else {
    return getSqlQueryParser(info);
  }
}
 
Example #8
Source File: SqlReservedKeywordGenerator.java    From dremio-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Writes the parser's reserved SQL keywords, one per line, to
 * {@code args[0]/RESERVED_KEYWORD_FILE_NAME}.
 */
public static void main(String[] args) throws Exception {
  if (args.length != 1) {
    throw new IllegalArgumentException("Usage: java {cp} " + SqlReservedKeywordGenerator.class.getName() +
        " path/where/to/write/the/file");
  }

  final File outputFile = new File(args[0], RESERVED_KEYWORD_FILE_NAME);
  System.out.println("Writing reserved SQL keywords to file: " + outputFile.getAbsolutePath());

  try (PrintWriter outFile = new PrintWriter(new OutputStreamWriter(new FileOutputStream(outputFile), UTF_8))) {
    outFile.printf("# AUTO-GENERATED LIST OF SQL RESERVED KEYWORDS (generated by %s)",
        SqlReservedKeywordGenerator.class.getName());
    outFile.println();

    final SqlAbstractParserImpl.Metadata metadata =
        SqlParser.create("", new ParserConfig(Quoting.DOUBLE_QUOTE, 256)).getMetadata();
    // A token qualifies only when the parser treats it as both a keyword
    // and a reserved word.
    for (String token : metadata.getTokens()) {
      if (metadata.isKeyword(token) && metadata.isReservedWord(token)) {
        outFile.println(token);
      }
    }
  }
}
 
Example #9
Source File: PlannerContext.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a catalog reader that resolves names first against
 * [currentCatalog, currentDatabase] and then against [currentCatalog].
 *
 * @param lenientCaseSensitivity when true, forces case-insensitive lookup
 *                               regardless of the parser configuration
 */
private FlinkCalciteCatalogReader createCatalogReader(
		boolean lenientCaseSensitivity,
		String currentCatalog,
		String currentDatabase) {
	SqlParser.Config sqlParserConfig = getSqlParserConfig();
	final boolean caseSensitive;
	if (lenientCaseSensitivity) {
		caseSensitive = false;
	} else {
		// inherit the sensitivity configured for the SQL parser
		caseSensitive = sqlParserConfig.caseSensitive();
	}

	// Rebuild the parser config with the (possibly overridden) sensitivity.
	SqlParser.Config newSqlParserConfig = SqlParser.configBuilder(sqlParserConfig)
			.setCaseSensitive(caseSensitive)
			.build();

	SchemaPlus rootSchema = getRootSchema(this.rootSchema.plus());
	return new FlinkCalciteCatalogReader(
			CalciteSchema.from(rootSchema),
			asList(
					asList(currentCatalog, currentDatabase),
					singletonList(currentCatalog)
			),
			typeFactory,
			CalciteConfig$.MODULE$.connectionConfig(newSqlParserConfig));
}
 
Example #10
Source File: AbstractMaterializedViewTest.java    From calcite with Apache License 2.0 6 votes vote down vote up
/**
 * Parses, validates and converts {@code sql} into a relational expression
 * rooted in the given cluster, resolving names against {@code defaultSchema}.
 *
 * @throws SqlParseException if {@code sql} cannot be parsed
 */
private RelNode toRel(RelOptCluster cluster, SchemaPlus rootSchema,
    SchemaPlus defaultSchema, String sql) throws SqlParseException {
  final SqlParser parser = SqlParser.create(sql, SqlParser.Config.DEFAULT);
  final SqlNode parsed = parser.parseStmt();

  final CalciteCatalogReader catalogReader = new CalciteCatalogReader(
      CalciteSchema.from(rootSchema),
      CalciteSchema.from(defaultSchema).path(null),
      new JavaTypeFactoryImpl(), new CalciteConnectionConfigImpl(new Properties()));

  final SqlValidator validator = new ValidatorForTest(SqlStdOperatorTable.instance(),
      catalogReader, new JavaTypeFactoryImpl(), SqlConformanceEnum.DEFAULT);
  final SqlNode validated = validator.validate(parsed);
  // Converter trims unused fields, expands, and decorrelates so plans are
  // in a canonical form for the materialized-view tests.
  final SqlToRelConverter.Config config = SqlToRelConverter.configBuilder()
      .withTrimUnusedFields(true)
      .withExpand(true)
      .withDecorrelationEnabled(true)
      .build();
  // View expansion is deliberately unsupported in these tests.
  final SqlToRelConverter converter = new SqlToRelConverter(
      (rowType, queryString, schemaPath, viewPath) -> {
        throw new UnsupportedOperationException("cannot expand view");
      }, validator, catalogReader, cluster, StandardConvertletTable.INSTANCE, config);
  return converter.convertQuery(validated, false, true).rel;
}
 
Example #11
Source File: FrameworksTest.java    From calcite with Apache License 2.0 6 votes vote down vote up
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-2039">[CALCITE-2039]
 * AssertionError when pushing project to ProjectableFilterableTable</a>
 * using UPDATE via {@link Frameworks}. */
@Test void testUpdate() throws Exception {
  final Table table = new TableImpl();
  final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
  final SchemaPlus schema = rootSchema.add("x", new AbstractSchema());
  schema.add("MYTABLE", table);
  final List<RelTraitDef> traitDefs = new ArrayList<>();
  traitDefs.add(ConventionTraitDef.INSTANCE);
  traitDefs.add(RelDistributionTraitDef.INSTANCE);
  // Case-insensitive parsing so "MYTABLE" matches regardless of casing.
  final SqlParser.Config parserConfig = SqlParser.configBuilder(SqlParser.Config.DEFAULT)
      .setCaseSensitive(false)
      .build();

  // define the rules you want to apply
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .parserConfig(parserConfig)
      .defaultSchema(schema)
      .traitDefs(traitDefs)
      .ruleSets(RuleSets.ofList(AbstractConverter.ExpandConversionRule.INSTANCE))
      .programs(Programs.ofRules(Programs.RULE_SET))
      .build();
  executeQuery(config, " UPDATE MYTABLE set id=7 where id=1",
      CalciteSystemProperty.DEBUG.value());
}
 
Example #12
Source File: PlannerTest.java    From calcite with Apache License 2.0 6 votes vote down vote up
/** Runs {@code sql} through a planner with sort-removal rules and asserts
 * the resulting plan, skipping the check when collation traits are off. */
private void runDuplicateSortCheck(String sql, String plan) throws Exception {
  final RuleSet ruleSet =
      RuleSets.ofList(
          SortRemoveRule.INSTANCE,
          EnumerableRules.ENUMERABLE_TABLE_SCAN_RULE,
          EnumerableRules.ENUMERABLE_PROJECT_RULE,
          EnumerableRules.ENUMERABLE_WINDOW_RULE,
          EnumerableRules.ENUMERABLE_SORT_RULE,
          ProjectToWindowRule.PROJECT);
  final Planner planner = getPlanner(null,
      SqlParser.configBuilder().setLex(Lex.JAVA).build(),
      Programs.of(ruleSet));
  final SqlNode validated = planner.validate(planner.parse(sql));
  final RelNode rel = planner.rel(validated).rel;
  final RelTraitSet traits = rel.getTraitSet().replace(EnumerableConvention.INSTANCE);
  if (traits.getTrait(RelCollationTraitDef.INSTANCE) == null) {
    // SortRemoveRule can only work if collation trait is enabled.
    return;
  }
  final RelNode transformed = planner.transform(0, traits, rel);
  assertThat(toString(transformed), equalTo(plan));
}
 
Example #13
Source File: CalciteMetaImpl.java    From calcite with Apache License 2.0 6 votes vote down vote up
/** Adds the value of database property {@code p} to {@code builder};
 * unhandled properties are returned unchanged. */
private ImmutableMap.Builder<DatabaseProperty, Object> addProperty(
    ImmutableMap.Builder<DatabaseProperty, Object> builder,
    DatabaseProperty p) {
  final Object value;
  switch (p) {
  case GET_S_Q_L_KEYWORDS:
    // Derive the keyword list from the parser's own metadata.
    value = SqlParser.create("").getMetadata().getJdbcKeywords();
    break;
  case GET_NUMERIC_FUNCTIONS:
    value = SqlJdbcFunctionCall.getNumericFunctions();
    break;
  case GET_STRING_FUNCTIONS:
    value = SqlJdbcFunctionCall.getStringFunctions();
    break;
  case GET_SYSTEM_FUNCTIONS:
    value = SqlJdbcFunctionCall.getSystemFunctions();
    break;
  case GET_TIME_DATE_FUNCTIONS:
    value = SqlJdbcFunctionCall.getTimeDateFunctions();
    break;
  default:
    return builder;
  }
  return builder.put(p, value);
}
 
Example #14
Source File: RelBuilderTest.java    From calcite with Apache License 2.0 5 votes vote down vote up
/** Creates a config based on the "scott" schema. */
public static Frameworks.ConfigBuilder config() {
  final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
  final SchemaPlus scott =
      CalciteAssert.addSchema(rootSchema, CalciteAssert.SchemaSpec.SCOTT_WITH_TEMPORAL);
  return Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.Config.DEFAULT)
      .defaultSchema(scott)
      .traitDefs((List<RelTraitDef>) null)
      .programs(Programs.heuristicJoinOrder(Programs.RULE_SET, true, 2));
}
 
Example #15
Source File: QueryRecord.java    From nifi with Apache License 2.0 5 votes vote down vote up
@Override
public ValidationResult validate(final String subject, final String input, final ValidationContext context) {
    // Defer validation when the property contains Expression Language.
    if (context.isExpressionLanguagePresent(input)) {
        return new ValidationResult.Builder()
            .input(input)
            .subject(subject)
            .valid(true)
            .explanation("Expression Language Present")
            .build();
    }

    final String substituted = context.newPropertyValue(input).evaluateAttributeExpressions().getValue();
    final Config config = SqlParser.configBuilder()
        .setLex(Lex.MYSQL_ANSI)
        .build();

    // Valid if and only if the statement parses under MySQL-ANSI lexing.
    try {
        SqlParser.create(substituted, config).parseStmt();
        return new ValidationResult.Builder()
            .subject(subject)
            .input(input)
            .valid(true)
            .build();
    } catch (final Exception e) {
        return new ValidationResult.Builder()
            .subject(subject)
            .input(input)
            .valid(false)
            .explanation("Not a valid SQL Statement: " + e.getMessage())
            .build();
    }
}
 
Example #16
Source File: CalciteParser.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Parses a SQL statement into a {@link SqlNode}. The {@link SqlNode} is not yet validated.
 *
 * @param sql a sql string to parse
 * @return a parsed sql node
 * @throws SqlParserException if an exception is thrown when parsing the statement
 */
public SqlNode parse(String sql) {
	try {
		return SqlParser.create(sql, config).parseStmt();
	} catch (SqlParseException e) {
		// Rethrow as the planner's unchecked exception type, keeping the cause.
		throw new SqlParserException("SQL parse failed. " + e.getMessage(), e);
	}
}
 
Example #17
Source File: QuarkTestUtil.java    From quark with Apache License 2.0 5 votes vote down vote up
/** Parses {@code sql} with Quark's query parser, renders it in the given
 * dialect, and asserts it equals {@code expectedSql} (newlines flattened). */
public static void checkSqlParsing(String sql, Properties info, String expectedSql,
    SqlDialect dialect)
    throws QuarkException, SqlParseException {
  final SqlQueryParser parser = new SqlQueryParser(info);
  final SqlNode sqlNode = parser.getSqlParser(sql).parseQuery();
  final String rendered = sqlNode.toSqlString(dialect).getSql().replace("\n", " ");
  assertEquals(expectedSql, rendered);
}
 
Example #18
Source File: SqlToOperationConverter.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Renders {@code sqlNode} as a SQL string whose identifier quoting and
 * casing follow the planner's parser configuration.
 */
private String getQuotedSqlString(SqlNode sqlNode) {
	SqlParser.Config parserConfig = flinkPlanner.config().getParserConfig();
	SqlDialect dialect = new CalciteSqlDialect(SqlDialect.EMPTY_CONTEXT
		// Fix: quoted casing must come from quotedCasing(); the original
		// copy-pasted unquotedCasing() here as well.
		.withQuotedCasing(parserConfig.quotedCasing())
		.withConformance(parserConfig.conformance())
		.withUnquotedCasing(parserConfig.unquotedCasing())
		.withIdentifierQuoteString(parserConfig.quoting().string));
	return sqlNode.toSqlString(dialect).getSql();
}
 
Example #19
Source File: SqlAdvisor.java    From calcite with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a SqlAdvisor with a validator instance and given parser configuration.
 *
 * @param validator Validator (with hint support) used to analyze partial queries
 * @param parserConfig parser config used when tokenizing SQL for completion
 */
public SqlAdvisor(
    SqlValidatorWithHints validator,
    SqlParser.Config parserConfig) {
  this.validator = validator;
  this.parserConfig = parserConfig;
}
 
Example #20
Source File: PlanningConfigurationBuilder.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the SQL parser config for this environment including a custom Calcite configuration.
 */
public SqlParser.Config getSqlParserConfig() {
	// Java lex is used because back ticks are easier than double quotes in
	// programming and because identifier casing is preserved.
	return JavaScalaConversionUtil.toJava(calciteConfig(tableConfig).sqlParserConfig()).orElseGet(() ->
		SqlParser.configBuilder()
			.setParserFactory(FlinkSqlParserImpl.FACTORY)
			.setConformance(getSqlConformance())
			.setLex(Lex.JAVA)
			.build());
}
 
Example #21
Source File: TpcdsLatticeSuggesterTest.java    From calcite with Apache License 2.0 5 votes vote down vote up
/** Creates a planner config whose default schema is built from {@code spec}. */
static Frameworks.ConfigBuilder config(CalciteAssert.SchemaSpec spec) {
  final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
  return Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.Config.DEFAULT)
      .defaultSchema(CalciteAssert.addSchema(rootSchema, spec));
}
 
Example #22
Source File: FrameworksTest.java    From calcite with Apache License 2.0 5 votes vote down vote up
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-3228">[CALCITE-3228]
 * Error while applying rule ProjectScanRule:interpreter</a>
 *
 * <p>This bug appears under the following conditions:
 * 1) have an aggregate with group by and multi aggregate calls.
 * 2) the aggregate can be removed during optimization.
 * 3) all aggregate calls are simplified to the same reference.
 * */
@Test void testPushProjectToScan() throws Exception {
  final Table table = new TableImpl();
  final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
  final SchemaPlus schema = rootSchema.add("x", new AbstractSchema());
  schema.add("MYTABLE", table);
  final List<RelTraitDef> traitDefs = new ArrayList<>();
  traitDefs.add(ConventionTraitDef.INSTANCE);
  traitDefs.add(RelDistributionTraitDef.INSTANCE);
  // Case-insensitive parsing so "mytable" resolves to MYTABLE.
  final SqlParser.Config parserConfig = SqlParser.configBuilder(SqlParser.Config.DEFAULT)
      .setCaseSensitive(false)
      .build();

  // define the rules you want to apply
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .parserConfig(parserConfig)
      .defaultSchema(schema)
      .traitDefs(traitDefs)
      .ruleSets(RuleSets.ofList(AbstractConverter.ExpandConversionRule.INSTANCE,
          ProjectTableScanRule.INSTANCE))
      .programs(Programs.ofRules(Programs.RULE_SET))
      .build();

  executeQuery(config, "select min(id) as mi, max(id) as ma from mytable where id=1 group by id",
      CalciteSystemProperty.DEBUG.value());
}
 
Example #23
Source File: CalciteQueryValidator.java    From components with Apache License 2.0 5 votes vote down vote up
@Override
public boolean isValid(final String query) {
    final String checkedQuery = trimQuery(query);
    try {
        final SqlNode parsedNode = SqlParser.create(checkedQuery).parseQuery();
        final Boolean verdict = parsedNode.accept(whitelistVisitor);
        // A null verdict from the visitor means nothing was rejected.
        return verdict == null || verdict;
    } catch (SqlParseException e) {
        // Unparsable SQL is never valid.
        return false;
    }
}
 
Example #24
Source File: ParserBenchmark.java    From calcite with Apache License 2.0 5 votes vote down vote up
@Setup
public void setup() throws SqlParseException {
  // Builds a "select 1, <literals...> from dual" statement of roughly
  // `length` characters, mixing parameter markers, int literals and quoted
  // strings, to exercise the parser on long statements.
  StringBuilder sb = new StringBuilder((int) (length * 1.2));
  sb.append("select 1");
  Random rnd = new Random();
  rnd.setSeed(424242); // fixed seed => identical statement on every run
  for (; sb.length() < length;) {
    // Emit up to 7 select items per line before the newline below.
    for (int i = 0; i < 7 && sb.length() < length; i++) {
      sb.append(", ");
      switch (rnd.nextInt(3)) {
      case 0:
        sb.append("?"); // dynamic parameter marker
        break;
      case 1:
        sb.append(rnd.nextInt()); // integer literal
        break;
      case 2:
        // quoted string literal built from two random longs
        sb.append('\'').append(rnd.nextLong()).append(rnd.nextLong())
            .append('\'');
        break;
      }
    }
    if (comments && sb.length() < length) {
      // NOTE(review): "//" is not standard SQL comment syntax — presumably
      // the benchmarked parser's lexer accepts it; confirm before reuse.
      sb.append("// sb.append('\\'').append(rnd.nextLong()).append(rnd.nextLong()).append(rnd"
          + ".nextLong())");
    }
    sb.append('\n');
  }
  sb.append(" from dual");
  parser = SqlParser.create("values(1)"); // placeholder; `sql` holds the real statement
  sql = sb.toString();
}
 
Example #25
Source File: ServerMetaProvider.java    From dremio-oss with Apache License 2.0 5 votes vote down vote up
@Override
public GetServerMetaResp execute() throws Exception {
  // Builds the JDBC server-metadata response; the template depends on the
  // session's record batch format (legacy DRILL_1_0 vs. current default).
  final GetServerMetaResp.Builder respBuilder = GetServerMetaResp.newBuilder();
  final ServerMeta.Builder metaBuilder = session.getRecordBatchFormat() != RecordBatchFormat.DRILL_1_0
      ? ServerMeta.newBuilder(DEFAULT)
      : ServerMeta.newBuilder(DRILL_1_0_DEFAULT);
  PlannerSettings plannerSettings = new PlannerSettings(dContext.getConfig(),
    session.getOptions(), () -> dContext.getClusterResourceInformation());

  // The parser config drives the identifier limits, quoting and casing
  // reported below.
  ParserConfig config = ParserConfig.newInstance(session, plannerSettings);

  int identifierMaxLength = config.identifierMaxLength();
  // Keyword list comes from the parser's own metadata (comma-separated string).
  Metadata metadata = SqlParser.create("", config).getMetadata();
  metaBuilder
    .setMaxCatalogNameLength(identifierMaxLength)
    .setMaxColumnNameLength(identifierMaxLength)
    .setMaxCursorNameLength(identifierMaxLength)
    .setMaxSchemaNameLength(identifierMaxLength)
    .setMaxTableNameLength(identifierMaxLength)
    .setMaxUserNameLength(identifierMaxLength)
    .setIdentifierQuoteString(config.quoting().string)
    .setIdentifierCasing(getIdentifierCasing(config.unquotedCasing(), config.caseSensitive()))
    .setQuotedIdentifierCasing(getIdentifierCasing(config.quotedCasing(), config.caseSensitive()))
    .addAllSqlKeywords(Splitter.on(",").split(metadata.getJdbcKeywords()));
  respBuilder.setServerMeta(metaBuilder);
  respBuilder.setStatus(RequestStatus.OK);
  respBuilder.setQueryId(queryId);
  return respBuilder.build();
}
 
Example #26
Source File: PlannerImpl.java    From Mycat2 with GNU General Public License v3.0 5 votes vote down vote up
@Override
public RelRoot expandView(RelDataType rowType, String queryString,
                          List<String> schemaPath, List<String> viewPath) {
    // Lazily initialize the planner before expanding a view.
    if (planner == null) {
        ready();
    }
    SqlParser parser = SqlParser.create(queryString, parserConfig);
    SqlNode sqlNode;
    try {
        sqlNode = parser.parseQuery();
    } catch (SqlParseException e) {
        throw new RuntimeException("parse failed", e);
    }

    // Resolve names relative to the view's own schema path.
    final CalciteCatalogReader catalogReader =
            createCatalogReader().withSchemaPath(schemaPath);
    final SqlValidator validator = createSqlValidator(catalogReader);

    final RexBuilder rexBuilder = createRexBuilder();
    final RelOptCluster cluster = RelOptCluster.create(planner, rexBuilder);
    // Field trimming is disabled so the view keeps its declared row type.
    final SqlToRelConverter.Config config = SqlToRelConverter
            .configBuilder()
            .withConfig(sqlToRelConverterConfig)
            .withTrimUnusedFields(false)
            .build();
    final SqlToRelConverter sqlToRelConverter =
            new SqlToRelConverter(this, validator,
                    catalogReader, cluster, convertletTable, config);

    // Convert, flatten nested record types, then decorrelate subqueries.
    final RelRoot root =
            sqlToRelConverter.convertQuery(sqlNode, true, false);
    final RelRoot root2 =
            root.withRel(sqlToRelConverter.flattenTypes(root.rel, true));
    final RelBuilder relBuilder =
            config.getRelBuilderFactory().create(cluster, null);
    // NOTE(review): decorrelation is applied to root.rel (pre-flattening)
    // rather than root2.rel — confirm this is intentional.
    return root2.withRel(
            RelDecorrelator.decorrelateQuery(root.rel, relBuilder));
}
 
Example #27
Source File: SqlStatisticProviderTest.java    From calcite with Apache License 2.0 5 votes vote down vote up
/** Creates a config based on the "foodmart" schema. */
public static Frameworks.ConfigBuilder config() {
  final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
  final SchemaPlus foodmart =
      CalciteAssert.addSchema(rootSchema, CalciteAssert.SchemaSpec.JDBC_FOODMART);
  return Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.Config.DEFAULT)
      .defaultSchema(foodmart)
      .traitDefs((List<RelTraitDef>) null)
      .programs(Programs.heuristicJoinOrder(Programs.RULE_SET, true, 2));
}
 
Example #28
Source File: KSqlParser.java    From kafka-eagle with Apache License 2.0 5 votes vote down vote up
/** Parser sql mapper kafka tree. */
public static TopicPartitionSchema parserTopic(String sql) {
	final TopicPartitionSchema tps = new TopicPartitionSchema();
	try {
		// Java lex so back-ticked identifiers are accepted.
		SqlParser sqlParser = SqlParser.create(sql,
				SqlParser.configBuilder().setLex(Lex.JAVA).build());
		parseNode(sqlParser.parseStmt(), tps);
	} catch (Exception e) {
		// Best effort: log the failure and return the (possibly empty) schema.
		ErrorUtils.print(KSqlParser.class).error("Parser kafka sql has error, msg is ", e);
	}
	return tps;
}
 
Example #29
Source File: CalciteSqlParser.java    From sylph with Apache License 2.0 5 votes vote down vote up
/**
 * Parses {@code joinSql}, rewrites it via {@code sqlParse}, appends the
 * resulting root node to the plan, and returns the accumulated plan.
 */
public List<Object> getPlan(String joinSql, SqlParser.Config sqlParserConfig)
        throws SqlParseException
{
    final SqlNode parsed = SqlParser.create(joinSql, sqlParserConfig).parseStmt();
    plan.add(sqlParse(parsed));
    return plan;
}
 
Example #30
Source File: SqlParamsFinderTest.java    From kylin-on-parquet-v2 with Apache License 2.0 5 votes vote down vote up
@Test
public void testWindowCallParams() throws SqlParseException {
    // A pattern with positional placeholders, and a concrete call to match it.
    final SqlNode sqlPattern = SqlParser.create("STDDEV_POP($0) OVER($1)").parseExpression();
    final SqlNode sqlCall = SqlParser.create("STDDEV_POP(C1) OVER (ORDER BY C1)").parseExpression();
    final Map<Integer, SqlNode> paramNodes = SqlParamsFinder
            .newInstance((SqlCall) sqlPattern, (SqlCall) sqlCall, true)
            .getParamNodes();

    Assert.assertEquals("C1", paramNodes.get(0).toString());
    Assert.assertEquals("(ORDER BY `C1`)", paramNodes.get(1).toString());
    Assert.assertTrue(paramNodes.get(1) instanceof SqlWindow);
}