Java Code Examples for org.apache.calcite.sql.validate.SqlConformanceEnum

The following examples show how to use org.apache.calcite.sql.validate.SqlConformanceEnum. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: calcite   Source File: TpcdsLatticeSuggesterTest.java    License: Apache License 2.0 6 votes vote down vote up
/** Creates a tester wired to a small-scale TPC-DS schema, using
 * {@link SqlConformanceEnum#LENIENT} conformance on the connection. */
Tester tpcds() {
  // Scale factor 0.01 keeps the generated TPC-DS data small enough for tests.
  final double scale = 0.01d;
  final SchemaPlus root = Frameworks.createRootSchema(true);
  final SchemaPlus tpcdsSchema = root.add("tpcds", new TpcdsSchema(scale));
  final CalciteConnectionConfigImpl connectionConfig =
      new CalciteConnectionConfigImpl(new Properties())
          .set(CalciteConnectionProperty.CONFORMANCE,
              SqlConformanceEnum.LENIENT.name());
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.Config.DEFAULT)
      .context(Contexts.of(connectionConfig))
      .defaultSchema(tpcdsSchema)
      .build();
  return withConfig(config);
}
 
Example 2
Source Project: calcite   Source File: SqlDialect.java    License: Apache License 2.0 6 votes vote down vote up
/** Returns the {@link SqlConformance} that matches this dialect.
 *
 * <p>The base implementation returns its best guess, based upon
 * {@link #databaseProduct}; sub-classes may override. */
@Nonnull public SqlConformance getConformance() {
  if (databaseProduct == DatabaseProduct.UNKNOWN
      || databaseProduct == DatabaseProduct.CALCITE) {
    return SqlConformanceEnum.DEFAULT;
  }
  if (databaseProduct == DatabaseProduct.BIG_QUERY) {
    return SqlConformanceEnum.BIG_QUERY;
  }
  if (databaseProduct == DatabaseProduct.MYSQL) {
    return SqlConformanceEnum.MYSQL_5;
  }
  if (databaseProduct == DatabaseProduct.ORACLE) {
    return SqlConformanceEnum.ORACLE_10;
  }
  if (databaseProduct == DatabaseProduct.MSSQL) {
    return SqlConformanceEnum.SQL_SERVER_2008;
  }
  // Any other product gets the pragmatic SQL:2003 conformance.
  return SqlConformanceEnum.PRAGMATIC_2003;
}
 
Example 3
Source Project: calcite   Source File: ImmutableBeans.java    License: Apache License 2.0 6 votes vote down vote up
/** Converts a declared default value to the property's type.
 *
 * <p>If the property type is an enum, {@code defaultValue} is expected to be
 * the name of one of its constants and is replaced by that constant;
 * otherwise the value is returned unchanged.
 *
 * @param defaultValue Declared default value (an enum constant name, if the
 *   property is an enum); may be null
 * @param propertyName Property name, used only for error messages
 * @param propertyType Declared type of the property
 * @return The default value, converted to an enum constant if applicable
 * @throws IllegalArgumentException if the property is an enum and the
 *   default value does not name one of its constants
 */
private static Object convertDefault(Object defaultValue, String propertyName,
    Class<?> propertyType) {
  if (propertyType.equals(SqlConformance.class)) {
    // Workaround for SqlConformance because it is actually not an Enum.
    propertyType = SqlConformanceEnum.class;
  }
  if (defaultValue == null || !propertyType.isEnum()) {
    return defaultValue;
  }
  for (Object enumConstant : propertyType.getEnumConstants()) {
    // Bounded wildcard avoids the raw-type cast of the original.
    if (((Enum<?>) enumConstant).name().equals(defaultValue)) {
      return enumConstant;
    }
  }
  throw new IllegalArgumentException("property '" + propertyName
      + "' is an enum but its default value " + defaultValue
      + " is not a valid enum constant");
}
 
Example 4
Source Project: calcite   Source File: SqlToRelTestBase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates a TesterImpl.
 *
 * <p>Delegates to the full constructor, supplying the default
 * {@code SqlToRelConverter} configuration, {@code DEFAULT} SQL conformance,
 * and an empty context.
 *
 * @param diffRepos Diff repository
 * @param enableDecorrelate Whether to decorrelate
 * @param enableTrim Whether to trim unused fields
 * @param enableExpand Whether to expand sub-queries
 * @param enableLateDecorrelate Whether to enable late decorrelation
 * @param enableTypeCoercion Whether to enable implicit type coercion
 * @param catalogReaderFactory Function to create catalog reader, or null
 * @param clusterFactory Called after a cluster has been created
 */
protected TesterImpl(DiffRepository diffRepos, boolean enableDecorrelate,
    boolean enableTrim, boolean enableExpand,
    boolean enableLateDecorrelate,
    boolean enableTypeCoercion,
    SqlTestFactory.MockCatalogReaderFactory
        catalogReaderFactory,
    Function<RelOptCluster, RelOptCluster> clusterFactory) {
  this(diffRepos, enableDecorrelate, enableTrim, enableExpand,
      enableLateDecorrelate,
      enableTypeCoercion,
      catalogReaderFactory,
      clusterFactory,
      SqlToRelConverter.Config.DEFAULT,
      SqlConformanceEnum.DEFAULT,
      Contexts.empty());
}
 
Example 5
Source Project: calcite   Source File: TableFunctionTest.java    License: Apache License 2.0 6 votes vote down vote up
/** Verifies CROSS APPLY of a table function under LENIENT conformance,
 * referencing the applied column both unqualified and table-qualified. */
@Test void testCrossApply() {
  final String unqualified = "select *\n"
      + "from (values 2, 5) as t (c)\n"
      + "cross apply table(\"s\".\"fibonacci2\"(c))";
  final String qualified = "select *\n"
      + "from (values 2, 5) as t (c)\n"
      + "cross apply table(\"s\".\"fibonacci2\"(t.c))";
  final String[] queries = {unqualified, qualified};
  for (String query : queries) {
    with()
        .with(CalciteConnectionProperty.CONFORMANCE,
            SqlConformanceEnum.LENIENT)
        .query(query)
        .returnsUnordered("C=2; N=1",
            "C=2; N=1",
            "C=2; N=2",
            "C=5; N=1",
            "C=5; N=1",
            "C=5; N=2",
            "C=5; N=3",
            "C=5; N=5");
  }
}
 
Example 6
Source Project: calcite   Source File: SqlToRelConverterTest.java    License: Apache License 2.0 6 votes vote down vote up
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-2323">[CALCITE-2323]
 * Validator should allow alternative nullCollations for ORDER BY in
 * OVER</a>. */
@Test void testUserDefinedOrderByOver() {
  final String sql = "select deptno,\n"
      + "  rank() over(partition by empno order by deptno)\n"
      + "from emp\n"
      + "order by row_number() over(partition by empno order by deptno)";
  // Build a connection config whose default null collation is LOW.
  final Properties properties = new Properties();
  properties.setProperty(
      CalciteConnectionProperty.DEFAULT_NULL_COLLATION.camelName(),
      NullCollation.LOW.name());
  final CalciteConnectionConfigImpl connectionConfig =
      new CalciteConnectionConfigImpl(properties);
  final TesterImpl tester =
      new TesterImpl(getDiffRepos(), false, false, true, false, true,
          null, null, SqlToRelConverter.Config.DEFAULT,
          SqlConformanceEnum.DEFAULT, Contexts.of(connectionConfig));
  sql(sql).with(tester).ok();
}
 
Example 7
Source Project: calcite   Source File: AbstractMaterializedViewTest.java    License: Apache License 2.0 6 votes vote down vote up
/** Parses, validates and converts a SQL string into a {@link RelNode},
 * with field trimming, sub-query expansion and decorrelation enabled,
 * and view expansion disallowed. */
private RelNode toRel(RelOptCluster cluster, SchemaPlus rootSchema,
    SchemaPlus defaultSchema, String sql) throws SqlParseException {
  final SqlNode parsed =
      SqlParser.create(sql, SqlParser.Config.DEFAULT).parseStmt();

  final CalciteCatalogReader catalogReader = new CalciteCatalogReader(
      CalciteSchema.from(rootSchema),
      CalciteSchema.from(defaultSchema).path(null),
      new JavaTypeFactoryImpl(), new CalciteConnectionConfigImpl(new Properties()));

  final SqlValidator validator = new ValidatorForTest(SqlStdOperatorTable.instance(),
      catalogReader, new JavaTypeFactoryImpl(), SqlConformanceEnum.DEFAULT);
  final SqlNode validated = validator.validate(parsed);

  final SqlToRelConverter.Config converterConfig = SqlToRelConverter.configBuilder()
      .withTrimUnusedFields(true)
      .withExpand(true)
      .withDecorrelationEnabled(true)
      .build();
  // View expansion is unsupported in this test harness, so fail fast.
  final SqlToRelConverter converter = new SqlToRelConverter(
      (rowType, queryString, schemaPath, viewPath) -> {
        throw new UnsupportedOperationException("cannot expand view");
      }, validator, catalogReader, cluster, StandardConvertletTable.INSTANCE,
      converterConfig);
  return converter.convertQuery(validated, false, true).rel;
}
 
Example 8
Source Project: Bats   Source File: BatsOptimizerTest.java    License: Apache License 2.0 5 votes vote down vote up
/** Parses and validates a sample query, returning the validated node
 * together with the validator so callers can continue conversion.
 *
 * <p>The original kept several dead reassignments of {@code sql}; they are
 * preserved below as comments for manual experimentation.
 *
 * @return Pair of the validated {@link SqlNode} and the {@link SqlValidator}
 * @throws Exception if parsing or validation fails
 */
static Pair<SqlNode, SqlValidator> testSqlValidator() throws Exception {
    // Alternative statements, kept for manual experimentation:
    // "select * from my_schema.test where f1=1 or f2=2 order by f3 limit 2"
    // "select * from test"
    // "select * from my_schema2.test2"
    // "select sum(f1),max(f2) from test"
    // "insert into test(f1,f2,f3) values(1,2,3)"
    // "update test set f1=100 where f2>10"
    // "delete from test where f2>10"
    String sql = "select t1.f1,sum(Distinct f1) as sumf1 from test as t1 "
            + "where f2>20 group by f1 having f1>10 order by f1 limit 2";
    SqlNode sqlNode = parse(sql);

    SqlOperatorTable opTab = SqlStdOperatorTable.instance();
    RelDataTypeFactory typeFactory = createJavaTypeFactory();
    SqlValidatorCatalogReader catalogReader = createCalciteCatalogReader(typeFactory);
    SqlConformance conformance = SqlConformanceEnum.DEFAULT;

    // Resolve the table up front to confirm the catalog reader can see it.
    List<String> names = new ArrayList<>();
    names.add("my_schema");
    names.add("test");
    catalogReader.getTable(names);

    SqlValidator sqlValidator =
            SqlValidatorUtil.newValidator(opTab, catalogReader, typeFactory, conformance);
    sqlNode = sqlValidator.validate(sqlNode);

    return new Pair<>(sqlNode, sqlValidator);
}
 
Example 9
Source Project: marble   Source File: HiveRexExecutorImpl.java    License: Apache License 2.0 5 votes vote down vote up
/** Generates the Java source of a method that evaluates the given constant
 * expressions under {@link SqlConformanceEnum#HIVE} conformance and returns
 * their values as an {@code Object[]}.
 *
 * @param rexBuilder Builder whose type factory supplies the type system
 * @param constExps Constant row expressions to evaluate
 * @param getter Strategy for reading input fields
 * @param rowType Row type of the input row
 * @return Java source code of the generated method
 */
private String compile(RexBuilder rexBuilder, List<RexNode> constExps,
    RexToLixTranslator.InputGetter getter, RelDataType rowType) {
  final RexProgramBuilder programBuilder =
      new RexProgramBuilder(rowType, rexBuilder);
  // Project each constant expression under a generated name c0, c1, ...
  for (RexNode node : constExps) {
    programBuilder.addProject(
        node, "c" + programBuilder.getProjectList().size());
  }
  final JavaTypeFactoryImpl javaTypeFactory =
      new JavaTypeFactoryImpl(rexBuilder.getTypeFactory().getTypeSystem());
  final BlockBuilder blockBuilder = new BlockBuilder();
  // The generated method takes a single Object parameter ("root0") and
  // immediately casts it to DataContext.
  final ParameterExpression root0_ =
      Expressions.parameter(Object.class, "root0");
  final ParameterExpression root_ = DataContext.ROOT;
  blockBuilder.add(
      Expressions.declare(
          Modifier.FINAL, root_,
          Expressions.convert_(root0_, DataContext.class)));
  // HIVE conformance selects Hive semantics during translation.
  final SqlConformance conformance = SqlConformanceEnum.HIVE;
  final RexProgram program = programBuilder.getProgram();
  final List<Expression> expressions =
      RexToLixTranslator.translateProjects(program, javaTypeFactory,
          conformance, blockBuilder, null, root_, getter, null);
  blockBuilder.add(
      Expressions.return_(null,
          Expressions.newArrayInit(Object[].class, expressions)));
  // Emit the block as the body of Function1.apply.
  final MethodDeclaration methodDecl =
      Expressions.methodDecl(Modifier.PUBLIC, Object[].class,
          BuiltInMethod.FUNCTION1_APPLY.method.getName(),
          ImmutableList.of(root0_), blockBuilder.toBlock());
  String code = Expressions.toString(methodDecl);
  if (CalcitePrepareImpl.DEBUG) {
    Util.debugCode(System.out, code);
  }
  return code;
}
 
Example 10
Source Project: marble   Source File: HiveTableEnv.java    License: Apache License 2.0 5 votes vote down vote up
/** Creates a {@code TableEnv} configured for Hive-compatible SQL:
 * the Hive operator table chained with the standard one, the Hive parser
 * with {@link SqlConformanceEnum#HIVE} conformance and case-insensitive
 * Java-style lex, a Hive convertlet table, a Hive rex executor, and an
 * {@code explode} table function. */
public static TableEnv getTableEnv() {
    final TableConfig tableConfig = new TableConfig();
    tableConfig.setSqlOperatorTable(
        ChainedSqlOperatorTable.of(HiveSqlOperatorTable.instance(),
            SqlStdOperatorTable.instance()));
    tableConfig.setSqlParserConfig(SqlParser
        .configBuilder()
        .setLex(Lex.JAVA)
        .setCaseSensitive(false)
        .setConformance(SqlConformanceEnum.HIVE)
        .setParserFactory(HiveSqlParserImpl.FACTORY)
        .build());
//    tableConfig.setRelDataTypeSystem(new HiveTypeSystemImpl());
    // Propagate the parser's case sensitivity into the connection config.
    final Properties connectionProps = new Properties();
    connectionProps.setProperty(CalciteConnectionProperty.CASE_SENSITIVE.camelName(),
        String.valueOf(tableConfig.getSqlParserConfig().caseSensitive()));
    tableConfig.setCalciteConnectionConfig(
        new CalciteConnectionConfigImpl(connectionProps));
    tableConfig.setConvertletTable(new HiveConvertletTable());
    tableConfig.setRexExecutor(
        new HiveRexExecutorImpl(Schemas.createDataContext(null, null)));
    final TableEnv tableEnv = new HiveTableEnv(tableConfig);
    // Register the "explode" UDTF so array columns can be flattened.
    tableEnv.addFunction("", "explode",
        "org.apache.calcite.adapter.hive.udtf.UDTFExplode", "eval");
    return tableEnv;
}
 
Example 11
Source Project: flink   Source File: FlinkDDLDataTypeTest.java    License: Apache License 2.0 5 votes vote down vote up
/** Builds the unmodifiable map of default parser/validator options used
 * by the DDL data-type tests. */
private static Map<String, Object> buildDefaultOptions() {
	final Map<String, Object> defaults = new HashMap<>();
	// Lexical settings: back-tick quoting, identifiers left unchanged.
	defaults.put("quoting", Quoting.BACK_TICK);
	defaults.put("quotedCasing", Casing.UNCHANGED);
	defaults.put("unquotedCasing", Casing.UNCHANGED);
	defaults.put("caseSensitive", true);
	// Validation settings.
	defaults.put("enableTypeCoercion", false);
	defaults.put("conformance", SqlConformanceEnum.DEFAULT);
	defaults.put("operatorTable", SqlStdOperatorTable.instance());
	defaults.put("parserFactory", FlinkSqlParserImpl.FACTORY);
	return Collections.unmodifiableMap(defaults);
}
 
Example 12
Source Project: calcite   Source File: EnumerableTableFunctionScan.java    License: Apache License 2.0 5 votes vote down vote up
/** Implements a table-valued function call by translating it via
 * {@code RexToLixTranslator.translateTableFunction}, feeding it the
 * enumerable produced by the first input. */
private Result tvfImplementorBasedImplement(
    EnumerableRelImplementor implementor, Prefer pref) {
  final JavaTypeFactory typeFactory = implementor.getTypeFactory();
  final BlockBuilder block = new BlockBuilder();
  final EnumerableRel input = (EnumerableRel) getInputs().get(0);
  final Result inputResult = implementor.visitChild(this, 0, input, pref);
  final PhysType physType = PhysTypeImpl.of(
      typeFactory, getRowType(), pref.prefer(inputResult.format));
  final Expression inputEnumerable =
      block.append("_input", inputResult.block, false);
  // Use the conformance registered by the implementor, if any; else DEFAULT.
  final SqlConformance conformance =
      (SqlConformance) implementor.map.getOrDefault("_conformance",
          SqlConformanceEnum.DEFAULT);
  block.add(
      RexToLixTranslator.translateTableFunction(typeFactory, conformance,
          block, DataContext.ROOT, (RexCall) getCall(), inputEnumerable,
          inputResult.physType, physType));
  return implementor.result(physType, block.toBlock());
}
 
Example 13
Source Project: calcite   Source File: SqlDialect.java    License: Apache License 2.0 5 votes vote down vote up
/** Creates an empty context. Use {@link #EMPTY_CONTEXT} to reference the
 * instance.
 *
 * <p>Uses {@link SqlConformanceEnum#DEFAULT} conformance and
 * {@link NullCollation#HIGH} null ordering.
 * NOTE(review): remaining argument meanings follow ContextImpl's positional
 * constructor — verify against its declaration. */
private static Context emptyContext() {
  return new ContextImpl(DatabaseProduct.UNKNOWN, null, null, -1, -1,
      "'", "''", null, Casing.UNCHANGED, Casing.TO_UPPER, true,
      SqlConformanceEnum.DEFAULT, NullCollation.HIGH,
      RelDataTypeSystemImpl.DEFAULT, JethroDataSqlDialect.JethroInfo.EMPTY);
}
 
Example 14
Source Project: calcite   Source File: RexExecutorImpl.java    License: Apache License 2.0 5 votes vote down vote up
/** Generates the Java source of a {@code Function1.apply} method that
 * evaluates the given constant expressions and returns their values as an
 * {@code Object[]}.
 *
 * @param rexBuilder Builder whose type factory supplies the type system
 * @param constExps Constant row expressions to evaluate
 * @param getter Strategy for reading input fields
 * @param rowType Row type describing the input
 * @return Java source code of the generated method
 */
private static String compile(RexBuilder rexBuilder, List<RexNode> constExps,
    RexToLixTranslator.InputGetter getter, RelDataType rowType) {
  final RexProgramBuilder rexProgramBuilder =
      new RexProgramBuilder(rowType, rexBuilder);
  // Project each constant expression under a generated name c0, c1, ...
  int projectCount = 0;
  for (RexNode constExp : constExps) {
    rexProgramBuilder.addProject(constExp, "c" + projectCount++);
  }
  final JavaTypeFactoryImpl typeFactory =
      new JavaTypeFactoryImpl(rexBuilder.getTypeFactory().getTypeSystem());
  final BlockBuilder body = new BlockBuilder();
  // The generated method takes a single Object parameter ("root0") and
  // immediately casts it to DataContext.
  final ParameterExpression rawRoot =
      Expressions.parameter(Object.class, "root0");
  final ParameterExpression root = DataContext.ROOT;
  body.add(
      Expressions.declare(Modifier.FINAL, root,
          Expressions.convert_(rawRoot, DataContext.class)));
  final RexProgram program = rexProgramBuilder.getProgram();
  final List<Expression> projections =
      RexToLixTranslator.translateProjects(program, typeFactory,
          SqlConformanceEnum.DEFAULT, body, null, root, getter, null);
  body.add(
      Expressions.return_(null,
          Expressions.newArrayInit(Object[].class, projections)));
  final MethodDeclaration methodDecl =
      Expressions.methodDecl(Modifier.PUBLIC, Object[].class,
          BuiltInMethod.FUNCTION1_APPLY.method.getName(),
          ImmutableList.of(rawRoot), body.toBlock());
  final String code = Expressions.toString(methodDecl);
  if (CalciteSystemProperty.DEBUG.value()) {
    Util.debugCode(System.out, code);
  }
  return code;
}
 
Example 15
Source Project: calcite   Source File: AbstractSqlTester.java    License: Apache License 2.0 5 votes vote down vote up
/** Returns a tester that uses the given conformance; null means
 * {@link SqlConformanceEnum#DEFAULT}. When the conformance is a
 * {@link SqlConformanceEnum}, it is also applied to the connection so the
 * server side agrees with the validator. */
public SqlTester withConformance(SqlConformance conformance) {
  final SqlConformance conf =
      conformance == null ? SqlConformanceEnum.DEFAULT : conformance;
  final SqlTester tester = with("conformance", conf);
  if (!(conf instanceof SqlConformanceEnum)) {
    // Non-enum conformances cannot be passed as a connection property.
    return tester;
  }
  return tester.withConnectionFactory(
      CalciteAssert.EMPTY_CONNECTION_FACTORY
          .with(CalciteConnectionProperty.CONFORMANCE, conf));
}
 
Example 16
Source Project: calcite   Source File: TableFunctionTest.java    License: Apache License 2.0 5 votes vote down vote up
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-2004">[CALCITE-2004]
 * Wrong plan generated for left outer apply with table function</a>. */
@Test void testLeftOuterApply() {
  final String query = "select *\n"
      + "from (values 4) as t (c)\n"
      + "left join lateral table(\"s\".\"fibonacci2\"(c)) as R(n) on c=n";
  // LENIENT conformance is required for the lateral-table syntax.
  with()
      .with(CalciteConnectionProperty.CONFORMANCE,
          SqlConformanceEnum.LENIENT)
      .query(query)
      .returnsUnordered("C=4; N=null");
}
 
Example 17
Source Project: calcite   Source File: SqlToRelConverterTest.java    License: Apache License 2.0 5 votes vote down vote up
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-3789">[CALCITE-3789]
 * Support validation of UNNEST multiple array columns like Presto</a>.
 *
 * <p>Single-column variant: one array column unnested and aliased. */
@Test public void testAliasUnnestArrayPlanWithSingleColumn() {
  final String query = "select d.deptno, employee.empno\n"
      + "from dept_nested_expanded as d,\n"
      + " UNNEST(d.employees) as t(employee)";
  sql(query).conformance(SqlConformanceEnum.PRESTO).ok();
}
 
Example 18
Source Project: calcite   Source File: SqlToRelConverterTest.java    License: Apache License 2.0 5 votes vote down vote up
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-3789">[CALCITE-3789]
 * Support validation of UNNEST multiple array columns like Presto</a>.
 *
 * <p>Two-column variant: two array columns unnested together. */
@Test public void testAliasUnnestArrayPlanWithDoubleColumn() {
  final String query = "select d.deptno, e, k.empno\n"
      + "from dept_nested_expanded as d CROSS JOIN\n"
      + " UNNEST(d.admins, d.employees) as t(e, k)";
  sql(query).conformance(SqlConformanceEnum.PRESTO).ok();
}
 
Example 19
Source Project: calcite   Source File: SqlToRelConverterTest.java    License: Apache License 2.0 5 votes vote down vote up
/** Verifies a windowed aggregate over a dynamic-star sub-query, using a
 * conformance that allows GROUP BY on aliases. */
@Test void testWindowAndGroupByWithDynamicStar() {
  final String query = "SELECT\n"
      + "n_regionkey,\n"
      + "MAX(MIN(n_nationkey)) OVER (PARTITION BY n_regionkey)\n"
      + "FROM (SELECT * FROM SALES.NATION)\n"
      + "GROUP BY n_regionkey";

  // DEFAULT conformance, except that GROUP BY may reference select aliases.
  final SqlConformance conformance =
      new SqlDelegatingConformance(SqlConformanceEnum.DEFAULT) {
        @Override public boolean isGroupByAlias() {
          return true;
        }
      };
  sql(query).conformance(conformance).with(getTesterWithDynamicTable()).ok();
}
 
Example 20
Source Project: calcite   Source File: CalciteAssert.java    License: Apache License 2.0 5 votes vote down vote up
/** Returns an {@code AssertThat} configured with the schemas and connection
 * properties implied by the given named configuration. */
public AssertThat with(Config config) {
  switch (config) {
  case EMPTY:
    return EMPTY;
  // Schema-only configurations.
  case REGULAR:
    return with(SchemaSpec.HR, SchemaSpec.REFLECTIVE_FOODMART,
        SchemaSpec.POST);
  case REGULAR_PLUS_METADATA:
    return with(SchemaSpec.HR, SchemaSpec.REFLECTIVE_FOODMART);
  case LINGUAL:
    return with(SchemaSpec.LINGUAL);
  case SCOTT:
    return with(SchemaSpec.SCOTT);
  case JDBC_SCOTT:
    return with(SchemaSpec.JDBC_SCOTT);
  case JDBC_FOODMART:
    return with(CalciteAssert.SchemaSpec.JDBC_FOODMART);
  case JDBC_FOODMART_WITH_LATTICE:
    return with(SchemaSpec.JDBC_FOODMART_WITH_LATTICE);
  case FOODMART_CLONE:
    return with(SchemaSpec.CLONE_FOODMART);
  case AUX:
    return with(SchemaSpec.AUX, SchemaSpec.POST);
  // Configurations that also set connection properties.
  case GEO:
    return with(SchemaSpec.GEO)
        .with(CalciteConnectionProperty.CONFORMANCE,
            SqlConformanceEnum.LENIENT);
  case SPARK:
    return with(CalciteConnectionProperty.SPARK, true);
  default:
    throw Util.unexpected(config);
  }
}
 
Example 21
Source Project: incubator-pinot   Source File: CalciteSqlParser.java    License: Apache License 2.0 5 votes vote down vote up
/** Creates a SQL parser for the given statement, using Pinot's lexical
 * settings and the Babel parser. */
private static SqlParser getSqlParser(String sql) {
  // TODO: Check if this can be converted to static or thread local.
  // BABEL is a very liberal conformance value that allows anything supported
  // by any dialect.
  final SqlParser.Config parserConfig = SqlParser.configBuilder()
      .setLex(PINOT_LEX)
      .setConformance(SqlConformanceEnum.BABEL)
      .setParserFactory(SqlBabelParserImpl.FACTORY)
      .build();
  return SqlParser.create(sql, parserConfig);
}
 
Example 22
Source Project: Bats   Source File: CalciteConnectionConfigImpl.java    License: Apache License 2.0 4 votes vote down vote up
/** Returns the SQL conformance level configured via the
 * {@code CONFORMANCE} connection property. */
public SqlConformance conformance() {
  final CalciteConnectionProperty property =
      CalciteConnectionProperty.CONFORMANCE;
  return property.wrap(properties).getEnum(SqlConformanceEnum.class);
}
 
Example 23
Source Project: Bats   Source File: DrillConformance.java    License: Apache License 2.0 4 votes vote down vote up
/** Creates a DrillConformance based on {@link SqlConformanceEnum#DEFAULT}. */
public DrillConformance() {
  super(SqlConformanceEnum.DEFAULT);
}
 
Example 24
Source Project: Bats   Source File: DrillConformance.java    License: Apache License 2.0 4 votes vote down vote up
/** Creates a DrillConformance based on the given conformance flavor.
 *
 * @param flavor Base conformance to delegate to
 */
public DrillConformance(SqlConformanceEnum flavor) {
  super(flavor);
}
 
Example 25
Source Project: dremio-oss   Source File: DremioSqlConformance.java    License: Apache License 2.0 4 votes vote down vote up
/** Private constructor: based on {@link SqlConformanceEnum#DEFAULT};
 * presumably accessed via a singleton instance — TODO confirm. */
private DremioSqlConformance() {
  super(SqlConformanceEnum.DEFAULT);
}
 
Example 26
Source Project: samza   Source File: QueryPlanner.java    License: Apache License 2.0 4 votes vote down vote up
/** Plans a SamzaSQL query: parses, validates and converts it into a
 * relational algebra tree.
 *
 * @param query SQL text to plan
 * @return Root of the relational tree for the validated query
 * @throws SamzaException wrapping any parse, validation or planning failure
 */
public RelRoot plan(String query) {
  try {
    Connection connection = DriverManager.getConnection("jdbc:calcite:");
    CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class);
    SchemaPlus rootSchema = calciteConnection.getRootSchema();
    registerSourceSchemas(rootSchema);

    List<SamzaSqlScalarFunctionImpl> udfs = udfMetadata.stream()
        .map(SamzaSqlScalarFunctionImpl::new)
        .collect(Collectors.toList());

    final List<RelTraitDef> traitDefs = new ArrayList<>();
    traitDefs.add(ConventionTraitDef.INSTANCE);
    traitDefs.add(RelCollationTraitDef.INSTANCE);

    List<SqlOperatorTable> operatorTables = new ArrayList<>();
    operatorTables.add(new SamzaSqlOperatorTable());
    operatorTables.add(new SamzaSqlUdfOperatorTable(udfs));

    // Using lenient so that !=,%,- are allowed.
    FrameworkConfig config = Frameworks.newConfigBuilder()
        .parserConfig(SqlParser.configBuilder()
            .setLex(Lex.JAVA)
            .setConformance(SqlConformanceEnum.LENIENT)
            .setCaseSensitive(false) // Make Udfs case insensitive
            .build())
        .defaultSchema(rootSchema)
        .operatorTable(new ChainedSqlOperatorTable(operatorTables))
        .sqlToRelConverterConfig(SqlToRelConverter.Config.DEFAULT)
        .traitDefs(traitDefs)
        .context(Contexts.EMPTY_CONTEXT)
        .costFactory(null)
        .build();
    Planner planner = Frameworks.getPlanner(config);

    SqlNode parsed = planner.parse(query);
    SqlNode validated = planner.validate(parsed);
    RelRoot relRoot = planner.rel(validated);
    LOG.info("query plan:\n" + RelOptUtil.toString(relRoot.rel, SqlExplainLevel.ALL_ATTRIBUTES));
    return relRoot;
  } catch (Exception e) {
    // Boundary: wrap every failure with the offending query for context.
    String errorMsg = SamzaSqlValidator.formatErrorString(query, e);
    LOG.error(errorMsg, e);
    throw new SamzaException(errorMsg, e);
  }
}
 
Example 27
Source Project: flink   Source File: FlinkCalciteSqlValidator.java    License: Apache License 2.0 4 votes vote down vote up
/** Creates a validator over the given operator table, catalog reader and
 * type factory, fixed at {@link SqlConformanceEnum#DEFAULT} conformance.
 *
 * @param opTab Operator table
 * @param catalogReader Catalog reader
 * @param typeFactory Type factory
 */
public FlinkCalciteSqlValidator(
		SqlOperatorTable opTab,
		SqlValidatorCatalogReader catalogReader,
		RelDataTypeFactory typeFactory) {
	super(opTab, catalogReader, typeFactory, SqlConformanceEnum.DEFAULT);
}
 
Example 28
Source Project: flink   Source File: FlinkCalciteSqlValidator.java    License: Apache License 2.0 4 votes vote down vote up
/** Creates a validator over the given operator table, catalog reader and
 * type factory, fixed at {@link SqlConformanceEnum#DEFAULT} conformance.
 *
 * @param opTab Operator table
 * @param catalogReader Catalog reader
 * @param typeFactory Type factory
 */
public FlinkCalciteSqlValidator(
		SqlOperatorTable opTab,
		SqlValidatorCatalogReader catalogReader,
		RelDataTypeFactory typeFactory) {
	super(opTab, catalogReader, typeFactory, SqlConformanceEnum.DEFAULT);
}
 
Example 29
Source Project: calcite   Source File: BabelQuidemTest.java    License: Apache License 2.0 4 votes vote down vote up
/** Creates a Quidem connection factory that recognizes several named
 * test connections ("babel", "scott-babel", "scott-redshift",
 * "scott-big-query"), each wiring the Babel parser and
 * {@link SqlConformanceEnum#BABEL} conformance with per-dialect function
 * libraries; unknown names fall through to the superclass. */
@Override protected Quidem.ConnectionFactory createConnectionFactory() {
  return new QuidemConnectionFactory() {
    @Override public Connection connect(String name, boolean reference)
        throws Exception {
      switch (name) {
      case "babel":
        return BabelTest.connect();
      case "scott-babel":
        // SCOTT schema with the Babel parser and BABEL conformance.
        return CalciteAssert.that()
            .with(CalciteAssert.Config.SCOTT)
            .with(CalciteConnectionProperty.PARSER_FACTORY,
                SqlBabelParserImpl.class.getName() + "#FACTORY")
            .with(CalciteConnectionProperty.CONFORMANCE,
                SqlConformanceEnum.BABEL)
            .connect();
      case "scott-redshift":
        // Adds postgresql/oracle function libraries and lenient lookup.
        return CalciteAssert.that()
            .with(CalciteAssert.Config.SCOTT)
            .with(CalciteConnectionProperty.FUN, "standard,postgresql,oracle")
            .with(CalciteConnectionProperty.PARSER_FACTORY,
                SqlBabelParserImpl.class.getName() + "#FACTORY")
            .with(CalciteConnectionProperty.CONFORMANCE,
                SqlConformanceEnum.BABEL)
            .with(CalciteConnectionProperty.LENIENT_OPERATOR_LOOKUP, true)
            .connect();
      case "scott-big-query":
        // Adds the bigquery function library and lenient lookup.
        return CalciteAssert.that()
            .with(CalciteAssert.Config.SCOTT)
            .with(CalciteConnectionProperty.FUN, "standard,bigquery")
            .with(CalciteConnectionProperty.PARSER_FACTORY,
                SqlBabelParserImpl.class.getName() + "#FACTORY")
            .with(CalciteConnectionProperty.CONFORMANCE,
                SqlConformanceEnum.BABEL)
            .with(CalciteConnectionProperty.LENIENT_OPERATOR_LOOKUP, true)
            .connect();
      default:
        return super.connect(name, reference);
      }
    }
  };
}
 
Example 30
Source Project: calcite   Source File: SparkRules.java    License: Apache License 2.0 4 votes vote down vote up
/** Implements this relational expression for Spark: generates code that
 * applies this node's program (filter condition plus projections) to the
 * input RDD via {@code flatMap} — rows failing the condition yield an empty
 * list, passing rows yield a singleton list of the projected record.
 *
 * @param implementor Spark implementor providing type factory and input
 * @return Result pairing the physical type with the generated code block
 */
public Result implementSpark(Implementor implementor) {
  final JavaTypeFactory typeFactory = implementor.getTypeFactory();
  final BlockBuilder builder = new BlockBuilder();
  final SparkRel child = (SparkRel) getInput();

  final Result result = implementor.visitInput(this, 0, child);

  final PhysType physType =
      PhysTypeImpl.of(
          typeFactory, getRowType(), JavaRowFormat.CUSTOM);

  // final RDD<Employee> inputRdd = <<child adapter>>;
  // return inputRdd.flatMap(
  //   new FlatMapFunction<Employee, X>() {
  //          public List<X> call(Employee e) {
  //              if (!(e.empno < 10)) {
  //                  return Collections.emptyList();
  //              }
  //              return Collections.singletonList(
  //                  new X(...)));
  //          }
  //      })


  Type outputJavaType = physType.getJavaRowType();
  // NOTE(review): rddType appears unused in this method — confirm before
  // removing.
  final Type rddType =
      Types.of(
          JavaRDD.class, outputJavaType);
  Type inputJavaType = result.physType.getJavaRowType();
  final Expression inputRdd_ =
      builder.append(
          "inputRdd",
          result.block);

  // builder2 accumulates the body of the generated flatMap callback.
  BlockBuilder builder2 = new BlockBuilder();

  final ParameterExpression e_ =
      Expressions.parameter(inputJavaType, "e");
  if (program.getCondition() != null) {
    // Translate the filter condition; rows failing it return an empty list.
    Expression condition =
        RexToLixTranslator.translateCondition(
            program,
            typeFactory,
            builder2,
            new RexToLixTranslator.InputGetterImpl(
                Collections.singletonList(
                    Pair.of((Expression) e_, result.physType))),
            null, implementor.getConformance());
    builder2.add(
        Expressions.ifThen(
            Expressions.not(condition),
            Expressions.return_(null,
                Expressions.call(
                    BuiltInMethod.COLLECTIONS_EMPTY_LIST.method))));
  }

  // Projections are translated under DEFAULT conformance.
  final SqlConformance conformance = SqlConformanceEnum.DEFAULT;
  List<Expression> expressions =
      RexToLixTranslator.translateProjects(
          program,
          typeFactory,
          conformance,
          builder2,
          null,
          DataContext.ROOT,
          new RexToLixTranslator.InputGetterImpl(
              Collections.singletonList(
                  Pair.of((Expression) e_, result.physType))),
          null);
  // Passing rows return a singleton list holding the projected record.
  builder2.add(
      Expressions.return_(null,
          Expressions.convert_(
              Expressions.call(
                  BuiltInMethod.COLLECTIONS_SINGLETON_LIST.method,
                  physType.record(expressions)),
              List.class)));

  // Wrap the callback body in a lambda passed to RDD.flatMap.
  final BlockStatement callBody = builder2.toBlock();
  builder.add(
      Expressions.return_(
          null,
          Expressions.call(
              inputRdd_,
              SparkMethod.RDD_FLAT_MAP.method,
              Expressions.lambda(
                  SparkRuntime.CalciteFlatMapFunction.class,
                  callBody,
                  e_))));
  return implementor.result(physType, builder.toBlock());
}