The following examples show how to use the API class org.apache.calcite.sql.SqlDialect; you can also follow the links to GitHub to view the source code.
/**
 * Quotes identifiers in a SQL expression, used by the JDBC ext job when
 * quoting computed-column (cc) expressions.
 *
 * @param tableDesc  descriptor of the table the expression refers to
 * @param sqlExpr    the SQL expression whose identifiers should be quoted
 * @param sqlDialect the dialect whose quoting rules to apply
 * @return the expression with table and column identifiers quoted
 */
public static String quoteIdentifierInSqlExpr(TableDesc tableDesc, String sqlExpr, SqlDialect sqlDialect) {
    String table = tableDesc.getName();
    boolean tableMatched = false;
    List<String> tabPatterns = getTableNameOrAliasPatterns(table);
    if (isIdentifierNeedToQuote(sqlExpr, table, tabPatterns)) {
        sqlExpr = quoteIdentifier(sqlExpr, table, tabPatterns, sqlDialect);
        tableMatched = true;
    }
    if (tableMatched) {
        for (ColumnDesc columnDesc : tableDesc.getColumns()) {
            String column = columnDesc.getName();
            List<String> colPatterns = getColumnNameOrAliasPatterns(column);
            if (isIdentifierNeedToQuote(sqlExpr, column, colPatterns)) {
                sqlExpr = quoteIdentifier(sqlExpr, column, colPatterns, sqlDialect);
            }
        }
    }
    return sqlExpr;
}
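A minimal usage sketch (hypothetical: tableDesc describes a table T with a column PRICE; Hive quotes identifiers with backticks):
// Hypothetical call; tableDesc and the expression are illustrative only.
SqlDialect hiveDialect = SqlDialect.DatabaseProduct.HIVE.getDialect();
String quoted = quoteIdentifierInSqlExpr(tableDesc, "T.PRICE * 2", hiveDialect);
// expected to come out along the lines of: `T`.`PRICE` * 2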
@Test void testRedshiftDialect() throws Exception {
    final Tester t = new Tester().foodmart().withEvolve(true)
        .withDialect(SqlDialect.DatabaseProduct.REDSHIFT.getDialect())
        .withLibrary(SqlLibrary.POSTGRESQL);
    final String q0 = "select\n"
        + " CONCAT(\"fname\", ' ', \"lname\") as \"full_name\",\n"
        + " convert_timezone('UTC', 'America/Los_Angeles',\n"
        + " cast('2019-01-01 01:00:00' as timestamp)),\n"
        + " left(\"fname\", 1) as \"initial\",\n"
        + " to_date('2019-01-01', 'YYYY-MM-DD'),\n"
        + " to_timestamp('2019-01-01 01:00:00', 'YYYY-MM-DD HH:MM:SS'),\n"
        + " count(*) as c,\n"
        + " avg(\"total_children\" - \"num_children_at_home\")\n"
        + "from \"customer\" join \"sales_fact_1997\" using (\"customer_id\")\n"
        + "group by \"fname\", \"lname\"";
    t.addQuery(q0);
    assertThat(t.s.latticeMap.size(), is(1));
}
public String convertSql(String orig) {
    // for a JDBC source, convert quoting from backticks to double quotes
    String converted = orig.replaceAll("`", "\"");
    if (!configurer.skipHandleDefault()) {
        String escapedDefault = SqlDialect.CALCITE
            .quoteIdentifier(configurer.useUppercaseDefault() ? "DEFAULT" : "default");
        converted = converted.replaceAll("(?i)default\\.", escapedDefault + "."); // use the Calcite dialect to cater to SqlParser
        converted = converted.replaceAll("\"(?i)default\"\\.", escapedDefault + ".");
    }
    if (!configurer.skipDefaultConvert()) {
        try {
            SqlNode sqlNode = SqlParser.create(converted).parseQuery();
            sqlNode = sqlNode.accept(sqlNodeConverter);
            converted = sqlWriter.format(sqlNode);
        } catch (Throwable e) {
            logger.error("Failed to default convert sql, will use the input: {}", orig, e);
        } finally {
            sqlWriter.reset();
        }
    }
    converted = configurer.fixAfterDefaultConvert(converted);
    return converted;
}
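As a quick check of the quoting used above, a sketch; SqlDialect.CALCITE is Calcite's built-in double-quote dialect (also available as CalciteSqlDialect.DEFAULT):
// Quoting the identifier wraps it in double quotes.
String escaped = SqlDialect.CALCITE.quoteIdentifier("DEFAULT");
// escaped is "DEFAULT" including the literal double quotes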
public static JournalledJdbcSchema create(
        SchemaPlus parentSchema,
        String name,
        Map<String, Object> operand
) {
    DataSource dataSource;
    try {
        dataSource = parseDataSource(operand);
    } catch (Exception e) {
        throw new IllegalArgumentException("Error while reading dataSource", e);
    }
    String catalog = (String) operand.get("jdbcCatalog");
    String schema = (String) operand.get("jdbcSchema");
    Expression expression = null;
    if (parentSchema != null) {
        expression = Schemas.subSchemaExpression(parentSchema, name, JdbcSchema.class);
    }
    final SqlDialect dialect = createDialect(dataSource);
    final JdbcConvention convention = JdbcConvention.of(dialect, expression, name);
    return new JournalledJdbcSchema(dataSource, dialect, convention, catalog, schema, operand);
}
public static String getParsedSql(RelNode relNode, SqlDialect dialect) throws SQLException {
    if (dialect.getDatabaseProduct() == SqlDialect.DatabaseProduct.HIVE) {
        final HepProgram program = new HepProgramBuilder()
            .addRuleInstance(JoinCalcTransposeRule.LEFT_CALC)
            .addRuleInstance(JoinCalcTransposeRule.RIGHT_CALC)
            .addRuleInstance(CalcMergeRule.INSTANCE)
            .build();
        final RelOptPlanner planner = relNode.getCluster().getPlanner();
        final HepPlanner hepPlanner =
            new HepPlanner(program, planner.getContext());
        hepPlanner.setRoot(relNode);
        relNode = hepPlanner.findBestExp();
    }
    RelToSqlConverter relToSqlConverter = new RelToSqlConverter(dialect);
    RelToSqlConverter.Result res = relToSqlConverter.visitChild(0, relNode);
    SqlNode sqlNode = res.asQuery();
    String result = sqlNode.toSqlString(dialect, false).getSql();
    return result.replace("\n", " ");
}
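The final unparse step can be exercised on its own; a minimal sketch using Calcite's parser and the MySQL dialect (the table name emp is a placeholder):
// Parse a query, then render it back for a specific dialect.
// (SqlParser.parseQuery declares SqlParseException.)
SqlNode node = SqlParser.create("select * from emp").parseQuery();
String mysqlSql = node.toSqlString(MysqlSqlDialect.DEFAULT).getSql();
// roughly: SELECT * FROM `emp` (modulo line breaks)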
public static JdbcCatalogSchema create(
        SchemaPlus parentSchema,
        String name,
        DataSource dataSource,
        SqlDialectFactory dialectFactory,
        String catalog) {
    final Expression expression =
        parentSchema != null
            ? Schemas.subSchemaExpression(parentSchema, name,
                JdbcCatalogSchema.class)
            : Expressions.call(DataContext.ROOT,
                BuiltInMethod.DATA_CONTEXT_GET_ROOT_SCHEMA.method);
    final SqlDialect dialect =
        JdbcSchema.createDialect(dialectFactory, dataSource);
    final JdbcConvention convention =
        JdbcConvention.of(dialect, expression, name);
    return new JdbcCatalogSchema(dataSource, dialect, convention, catalog);
}
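A hedged wiring sketch for the factory above; the JDBC URL, driver, and credentials are placeholders, and rootSchema is assumed to be an existing SchemaPlus:
// Build a DataSource and let the dialect factory derive the dialect
// from the connection's metadata.
DataSource dataSource = JdbcSchema.dataSource(
    "jdbc:postgresql://localhost/db", "org.postgresql.Driver", "user", "pass");
JdbcCatalogSchema schema = JdbcCatalogSchema.create(
    rootSchema, "PG", dataSource, SqlDialectFactoryImpl.INSTANCE, "db");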
SqlWriter(Lattice lattice, SqlDialect dialect, StringBuilder buf,
        SqlImplementor.SimpleContext context) {
    this.lattice = lattice;
    this.context = context;
    this.buf = buf;
    this.dialect = dialect;
}
/**
 * Converts a Pig script to a list of SQL statements.
 *
 * @param pigQuery Pig script
 * @param sqlDialect Dialect of the SQL language to generate
 * @return a list of SQL statements
 * @throws IOException Exception during parsing or translating Pig
 */
public List<String> pigToSql(String pigQuery, SqlDialect sqlDialect)
        throws IOException {
    final SqlWriterConfig config = SqlPrettyWriter.config()
        .withQuoteAllIdentifiers(false)
        .withAlwaysUseParentheses(false)
        .withSelectListItemsOnSeparateLines(false)
        .withIndentation(2)
        .withDialect(sqlDialect);
    final SqlPrettyWriter writer = new SqlPrettyWriter(config);
    return pigToSql(pigQuery, writer);
}
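A hedged usage sketch (pigScript is a placeholder Pig Latin string; the surrounding method would declare throws IOException):
// Translate a Pig script into PostgreSQL-flavored SQL statements.
List<String> statements = pigToSql(pigScript, PostgresqlSqlDialect.DEFAULT);
statements.forEach(System.out::println);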
private JdbcSchema(DataSource dataSource, SqlDialect dialect,
        JdbcConvention convention, String catalog, String schema,
        ImmutableMap<String, JdbcTable> tableMap) {
    this.dataSource = Objects.requireNonNull(dataSource);
    this.dialect = Objects.requireNonNull(dialect);
    this.convention = convention;
    this.catalog = catalog;
    this.schema = schema;
    this.tableMap = tableMap;
    this.snapshot = tableMap != null;
}
protected void generateTypeString(StringBuilder sb, boolean withDetail) {
    sb.append("INTERVAL ");
    final SqlDialect dialect = AnsiSqlDialect.DEFAULT;
    final SqlWriterConfig config = SqlPrettyWriter.config()
        .withAlwaysUseParentheses(false)
        .withSelectListItemsOnSeparateLines(false)
        .withIndentation(0)
        .withDialect(dialect);
    final SqlPrettyWriter writer = new SqlPrettyWriter(config);
    intervalQualifier.unparse(writer, 0, 0);
    final String sql = writer.toString();
    sb.append(new SqlString(dialect, sql).getSql());
}
/** Similar to {@link #testHoist()} but using custom parser. */
@Test void testHoistMySql() {
    // SQL contains back-ticks, which require MySQL's quoting,
    // and DATEADD, which requires Babel.
    final String sql = "select 1 as x,\n"
        + " 'ab' || 'c' as y\n"
        + "from `my emp` /* comment with 'quoted string'? */ as e\n"
        + "where deptno < 40\n"
        + "and DATEADD(day, 1, hiredate) > date '2010-05-06'";
    final SqlDialect dialect = MysqlSqlDialect.DEFAULT;
    final Hoist.Hoisted hoisted =
        Hoist.create(Hoist.config()
            .withParserConfig(
                dialect.configureParser(SqlParser.configBuilder())
                    .setParserFactory(SqlBabelParserImpl::new)
                    .build()))
            .hoist(sql);
    // Simple toString converts each variable to '?N'
    final String expected = "select ?0 as x,\n"
        + " ?1 || ?2 as y\n"
        + "from `my emp` /* comment with 'quoted string'? */ as e\n"
        + "where deptno < ?3\n"
        + "and DATEADD(day, ?4, hiredate) > ?5";
    assertThat(hoisted.toString(), is(expected));
    // Custom string converts variables to '[N:TYPE:VALUE]'
    final String expected2 = "select [0:DECIMAL:1] as x,\n"
        + " [1:CHAR:ab] || [2:CHAR:c] as y\n"
        + "from `my emp` /* comment with 'quoted string'? */ as e\n"
        + "where deptno < [3:DECIMAL:40]\n"
        + "and DATEADD(day, [4:DECIMAL:1], hiredate) > [5:DATE:2010-05-06]";
    assertThat(hoisted.substitute(SqlParserTest::varToStr), is(expected2));
}
/**
 * Creates a SqlString. The SQL might contain dynamic parameters;
 * {@code dynamicParameters} designates the order of those parameters.
 *
 * @param dialect dialect
 * @param sql SQL text
 * @param dynamicParameters indices of the dynamic parameters
 */
public SqlString(SqlDialect dialect, String sql, ImmutableList<Integer> dynamicParameters) {
    this.dialect = dialect;
    this.sql = sql;
    this.dynamicParameters = dynamicParameters;
    assert sql != null : "sql must be NOT null";
    assert dialect != null : "dialect must be NOT null";
}
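A minimal usage sketch: a SqlString whose single dynamic parameter maps to the caller's parameter index 0:
// The "?" placeholder corresponds to caller parameter 0.
SqlString s = new SqlString(CalciteSqlDialect.DEFAULT,
    "VALUES (?)", ImmutableList.of(0));
String sql = s.getSql(); // "VALUES (?)"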
protected void generateTypeString(StringBuilder sb, boolean withDetail) {
    sb.append("INTERVAL ");
    final SqlDialect dialect = AnsiSqlDialect.DEFAULT;
    final SqlPrettyWriter writer = new SqlPrettyWriter(dialect);
    writer.setAlwaysUseParentheses(false);
    writer.setSelectListItemsOnSeparateLines(false);
    writer.setIndentation(0);
    intervalQualifier.unparse(writer, 0, 0);
    final String sql = writer.toString();
    sb.append(new SqlString(dialect, sql).getSql());
}
private static void appendWhereStatement(IJoinedFlatTableDesc flatDesc, StringBuilder sql, boolean singleLine, SqlDialect sqlDialect) {
    final String sep = singleLine ? " " : "\n";
    StringBuilder whereBuilder = new StringBuilder();
    whereBuilder.append("WHERE 1=1");
    DataModelDesc model = flatDesc.getDataModel();
    if (StringUtils.isNotEmpty(model.getFilterCondition())) {
        String filterCondition = model.getFilterCondition();
        if (flatDesc.getSegment() != null) {
            JoinedFormatter formatter = new JoinedFormatter(flatDesc);
            filterCondition = formatter.formatSentence(model.getFilterCondition());
        }
        String quotedFilterCondition = quoteIdentifierInSqlExpr(flatDesc, filterCondition, null);
        whereBuilder.append(" AND (").append(quotedFilterCondition).append(") "); // a filter condition containing special characters may cause bugs
    }
    if (flatDesc.getSegment() != null) {
        PartitionDesc partDesc = model.getPartitionDesc();
        if (partDesc != null && partDesc.getPartitionDateColumn() != null) {
            SegmentRange segRange = flatDesc.getSegRange();
            if (segRange != null && !segRange.isInfinite()) {
                whereBuilder.append(" AND (");
                String quotedPartitionCond = quoteIdentifierInSqlExpr(flatDesc,
                    partDesc.getPartitionConditionBuilder().buildDateRangeCondition(partDesc, flatDesc.getSegment(), segRange, null), sqlDialect);
                whereBuilder.append(quotedPartitionCond);
                whereBuilder.append(")" + sep);
            }
        }
    }
    sql.append(whereBuilder.toString());
}
@Deprecated
public SqlPrettyWriter(
        SqlDialect dialect,
        boolean alwaysUseParentheses,
        PrintWriter pw) {
    // NOTE that 'pw' is ignored; there is no place for it in the new API
    this(config().withDialect(Objects.requireNonNull(dialect))
        .withAlwaysUseParentheses(alwaysUseParentheses));
}
/**
 * If KylinConfig#enableHiveDdlQuote returns false, quoting is disabled.
 * If a specific SqlDialect is given, use it; otherwise use
 * KylinConfig#getFactTableDialect to quote the identifier.
 */
public static String quoteIdentifier(String identifier, SqlDialect specificSqlDialect) {
    if (!kylinConfig.enableHiveDdlQuote()) {
        return identifier;
    }
    if (specificSqlDialect != null) {
        return specificSqlDialect.quoteIdentifier(identifier);
    }
    setQuote();
    return defaultDialect.quoteIdentifier(identifier);
}
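A short sketch of the two paths (hypothetical call sites; the per-dialect quoting matches the test further below):
// With an explicit dialect, that dialect's quoting is used ...
String mssql = quoteIdentifier("order", SqlDialect.DatabaseProduct.MSSQL.getDialect()); // [order]
// ... with null, the configured default (fact table) dialect applies.
String byDefault = quoteIdentifier("order", null);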
private String getQuotedSqlString(SqlNode sqlNode) {
    SqlParser.Config parserConfig = flinkPlanner.config().getParserConfig();
    SqlDialect dialect = new CalciteSqlDialect(SqlDialect.EMPTY_CONTEXT
        .withQuotedCasing(parserConfig.unquotedCasing())
        .withConformance(parserConfig.conformance())
        .withUnquotedCasing(parserConfig.unquotedCasing())
        .withIdentifierQuoteString(parserConfig.quoting().string));
    return sqlNode.toSqlString(dialect).getSql();
}
/** Removes cast from string.
 *
 * <p>For example, {@code x > CAST('2015-01-07' AS DATE)}
 * becomes {@code x > '2015-01-07'}.
 */
private static RexNode stripCastFromString(RexNode node, SqlDialect dialect) {
    switch (node.getKind()) {
    case EQUALS:
    case IS_NOT_DISTINCT_FROM:
    case NOT_EQUALS:
    case GREATER_THAN:
    case GREATER_THAN_OR_EQUAL:
    case LESS_THAN:
    case LESS_THAN_OR_EQUAL:
        final RexCall call = (RexCall) node;
        final RexNode o0 = call.operands.get(0);
        final RexNode o1 = call.operands.get(1);
        if (o0.getKind() == SqlKind.CAST
                && o1.getKind() != SqlKind.CAST) {
            if (!dialect.supportsImplicitTypeCoercion((RexCall) o0)) {
                // If the dialect does not support implicit type coercion,
                // we definitely can not strip the cast.
                return node;
            }
            final RexNode o0b = ((RexCall) o0).getOperands().get(0);
            return call.clone(call.getType(), ImmutableList.of(o0b, o1));
        }
        if (o1.getKind() == SqlKind.CAST
                && o0.getKind() != SqlKind.CAST) {
            if (!dialect.supportsImplicitTypeCoercion((RexCall) o1)) {
                return node;
            }
            final RexNode o1b = ((RexCall) o1).getOperands().get(0);
            return call.clone(call.getType(), ImmutableList.of(o0, o1b));
        }
    }
    return node;
}
static String quoteIdentifier(String sqlExpr, String identifier, List<String> identifierPatterns, SqlDialect sqlDialect) {
    String quotedIdentifier = quoteIdentifier(identifier.trim(), sqlDialect);
    for (String pattern : identifierPatterns) {
        Matcher matcher = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE | Pattern.DOTALL).matcher(sqlExpr);
        if (matcher.find()) {
            sqlExpr = matcher.replaceAll("$1" + quotedIdentifier + "$3");
        }
    }
    return sqlExpr;
}
@Test
public void testQuoteWithIdentifier() {
    Assert.assertEquals("`abc`", FlatTableSqlQuoteUtils.quoteIdentifier("abc", null));
    Assert.assertEquals("abc", FlatTableSqlQuoteUtils.quoteIdentifier("abc", FlatTableSqlQuoteUtils.NON_QUOTE_DIALECT));
    Assert.assertEquals("\"abc\"", FlatTableSqlQuoteUtils.quoteIdentifier("abc", SqlDialect.DatabaseProduct.POSTGRESQL.getDialect()));
    Assert.assertEquals("`abc`", FlatTableSqlQuoteUtils.quoteIdentifier("abc", FlatTableSqlQuoteUtils.HIVE_DIALECT));
    Assert.assertEquals("[abc]", FlatTableSqlQuoteUtils.quoteIdentifier("abc", SqlDialect.DatabaseProduct.MSSQL.getDialect()));
    Assert.assertEquals("`abc`", FlatTableSqlQuoteUtils.quoteIdentifier("abc", SqlDialect.DatabaseProduct.MYSQL.getDialect()));
    Assert.assertEquals("`abc`", FlatTableSqlQuoteUtils.quoteIdentifier(SourceDialect.MYSQL, "abc"));
    Assert.assertEquals("`abc`", FlatTableSqlQuoteUtils.quoteIdentifier(SourceDialect.HIVE, "abc"));
    Assert.assertEquals("[abc]", FlatTableSqlQuoteUtils.quoteIdentifier(SourceDialect.MSSQL, "abc"));
    Assert.assertEquals("\"abc\"", FlatTableSqlQuoteUtils.quoteIdentifier(SourceDialect.POSTGRESQL, "abc"));
}
@Override
public List<String> getFieldNames() {
    for (SqlNode fieldNode : insertFields.getList()) {
        if (!(fieldNode instanceof SqlIdentifier)) {
            throw SqlExceptionHelper.parseError("Column type specified", this.toSqlString(new SqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql(),
                fieldNode.getParserPosition()).buildSilently();
        }
    }
    return insertFields.getList().stream().map(SqlNode::toString).collect(Collectors.toList());
}
public int executeCreateView(SqlCreateQuarkView sqlNode) throws SQLException {
    DBI dbi = getDbi();
    List<String> tableNameList = sqlNode.getTableName().names;
    String dataSourceName = tableNameList.get(0);
    ViewDAO viewDAO = dbi.onDemand(ViewDAO.class);
    JdbcSourceDAO jdbcDAO = dbi.onDemand(JdbcSourceDAO.class);
    QuboleDbSourceDAO quboleDAO = dbi.onDemand(QuboleDbSourceDAO.class);
    DataSource dataSource = jdbcDAO.findByName(dataSourceName,
        connection.getDSSet().getId());
    if (dataSource == null) {
        dataSource = quboleDAO.findByName(dataSourceName, connection.getDSSet().getId());
    }
    if (dataSource == null) {
        throw new SQLException("DataSource with name '" + dataSourceName + "' not found");
    }
    SqlPrettyWriter writer = new SqlPrettyWriter(SqlDialect.CALCITE);
    writer.setAlwaysUseParentheses(false);
    writer.setSelectListItemsOnSeparateLines(false);
    writer.setIndentation(0);
    writer.setQuoteAllIdentifiers(true);
    sqlNode.getQuery().unparse(writer, 0, 0);
    final String sql = writer.toString();
    LOG.debug(sql);
    return viewDAO.insert(sqlNode.getName(),
        "No Description", sql,
        0L, dataSource.getId(),
        tableNameList.get(1), tableNameList.get(2),
        connection.getDSSet().getId());
}
@Test
public void testOptWithJoin() throws Exception {
    String sql = "select\n" +
        "qh.created_at as dt, \n" +
        "count(qh.id) as num_queries\n" +
        "\n" +
        "from test_hist qh\n" +
        "join uinfo ui\n" +
        " on qh.qbol_user_id = ui.qu_id\n" +
        "join acc externals\n" +
        " on externals.id = ui.a_id\n" +
        "\n" +
        "where qh.timeout >= 350\n" +
        "and command_type = 'HiveCommand'\n" +
        "and qlog like '%\\\"HIVE_VERSION\\\":\\\"1.2\\\"%'\n" +
        "and customer_name like 'amogh'\n" +
        "\n" +
        "group by \n" +
        "qh.created_at\n" +
        "\n" +
        "order by dt asc";
    SqlQueryParser parser = new SqlQueryParser(connInfo);
    final SqlQueryParser.SqlQueryParserResult result = parser.parse(sql);
    final String hiveQuery = ResultProcessor.getParsedSql(result.getRelNode(),
        SqlDialect.DatabaseProduct.HIVE.getDialect());
    assertEquals("SELECT CREATED_AT, COUNT(ID) NUM_QUERIES FROM " +
        "PUBLIC.TEST_HIST_PARTITION INNER JOIN H2.PUBLIC.UINFO " +
        "ON TEST_HIST_PARTITION.QBOL_USER_ID = UINFO.QU_ID " +
        "INNER JOIN H2.PUBLIC.ACC ON UINFO.A_ID = ACC.ID " +
        "WHERE ACC.CUSTOMER_NAME LIKE 'amogh' AND " +
        "(TEST_HIST_PARTITION.TIMEOUT >= 350 " +
        "AND TEST_HIST_PARTITION.COMMAND_TYPE = 'HiveCommand' " +
        "AND TEST_HIST_PARTITION.QLOG " +
        "LIKE '%\\\"HIVE_VERSION\\\":\\\"1.2\\\"%') " +
        "GROUP BY CREATED_AT ORDER BY CREATED_AT", hiveQuery);
}
@Deprecated
public SqlPrettyWriter(
        SqlDialect dialect,
        boolean alwaysUseParentheses) {
    this(config().withDialect(Objects.requireNonNull(dialect))
        .withAlwaysUseParentheses(alwaysUseParentheses));
}
@Ignore
@Test
public void testNoOptWithHiveOp() throws Exception {
    String sql = "select\n" +
        "to_date(qh.created_at) as dt, \n" +
        "count(qh.id) as num_queries\n" +
        "\n" +
        "from test_hist qh\n" +
        "join uinfo ui\n" +
        " on qh.qbol_user_id = ui.qu_id\n" +
        "join acc externals\n" +
        " on externals.id = ui.a_id\n" +
        "\n" +
        "where to_date(qh.created_at) >= date_sub(from_unixtime(unix_timestamp()),150)\n" +
        "and command_type = 'HiveCommand'\n" +
        "and qlog like '%\\\"HIVE_VERSION\\\":\\\"1.2\\\"%'\n" +
        "and customer_name like 'amogh'\n" +
        "\n" +
        "group by \n" +
        "to_date(qh.created_at)\n" +
        "\n" +
        "order by dt asc";
    SqlQueryParser parser = new SqlQueryParser(connInfo);
    final SqlQueryParser.SqlQueryParserResult result = parser.parse(sql);
    final String hiveQuery = ResultProcessor.getParsedSql(result.getRelNode(),
        SqlDialect.DatabaseProduct.HIVE.getDialect());
    assertEquals("SELECT TO_DATE(TEST_HIST.CREATED_AT) DT, "
        + "COUNT(TEST_HIST.ID) NUM_QUERIES "
        + "FROM H2.PUBLIC.TEST_HIST INNER JOIN H2.PUBLIC.UINFO "
        + "ON TEST_HIST.QBOL_USER_ID = UINFO.QU_ID INNER JOIN H2.PUBLIC.ACC "
        + "ON UINFO.A_ID = ACC.ID "
        + "WHERE ACC.CUSTOMER_NAME LIKE 'amogh' AND "
        + "(TO_DATE(TEST_HIST.CREATED_AT) >= "
        + "DATE_SUB(FROM_UNIXTIME(UNIX_TIMESTAMP()), 150) "
        + "AND TEST_HIST.COMMAND_TYPE = 'HiveCommand' "
        + "AND TEST_HIST.QLOG LIKE '%\\\"HIVE_VERSION\\\":\\\"1.2\\\"%') "
        + "GROUP BY TO_DATE(TEST_HIST.CREATED_AT) "
        + "ORDER BY DT", hiveQuery);
}
public double tableCardinality(RelOptTable table) {
    final SqlDialect dialect = table.unwrap(SqlDialect.class);
    final DataSource dataSource = table.unwrap(DataSource.class);
    return withBuilder(
        (cluster, relOptSchema, relBuilder) -> {
            // Generate:
            //   SELECT COUNT(*) FROM `EMP`
            relBuilder.push(table.toRel(ViewExpanders.simpleContext(cluster)))
                .aggregate(relBuilder.groupKey(), relBuilder.count());
            final String sql = toSql(relBuilder.build(), dialect);
            try (Connection connection = dataSource.getConnection();
                    Statement statement = connection.createStatement();
                    ResultSet resultSet = statement.executeQuery(sql)) {
                if (!resultSet.next()) {
                    throw new AssertionError("expected exactly 1 row: " + sql);
                }
                final double cardinality = resultSet.getDouble(1);
                if (resultSet.next()) {
                    throw new AssertionError("expected exactly 1 row: " + sql);
                }
                return cardinality;
            } catch (SQLException e) {
                throw handle(e, sql);
            }
        });
}
public boolean isKey(RelOptTable table, List<Integer> columns) {
    final SqlDialect dialect = table.unwrap(SqlDialect.class);
    final DataSource dataSource = table.unwrap(DataSource.class);
    return withBuilder(
        (cluster, relOptSchema, relBuilder) -> {
            // The collection of columns ['DEPTNO'] is a key for 'EMP' if the
            // following query returns no rows:
            //
            //   SELECT 1
            //   FROM `EMP`
            //   GROUP BY `DEPTNO`
            //   HAVING COUNT(*) > 1
            //
            final RelOptTable.ToRelContext toRelContext =
                ViewExpanders.simpleContext(cluster);
            relBuilder.push(table.toRel(toRelContext))
                .aggregate(relBuilder.groupKey(relBuilder.fields(columns)),
                    relBuilder.count())
                .filter(
                    relBuilder.call(SqlStdOperatorTable.GREATER_THAN,
                        Util.last(relBuilder.fields()), relBuilder.literal(1)));
            final String sql = toSql(relBuilder.build(), dialect);
            try (Connection connection = dataSource.getConnection();
                    Statement statement = connection.createStatement();
                    ResultSet resultSet = statement.executeQuery(sql)) {
                return !resultSet.next();
            } catch (SQLException e) {
                throw handle(e, sql);
            }
        });
}
@Test
public void testSqlParsingOfLimitClauseForRedShift() throws QuarkException, SQLException, SqlParseException {
    String query = "select product_id from product limit 100 offset 10";
    final SqlDialect redshiftDialect =
        SqlDialect.getProduct("REDSHIFT", null).getDialect();
    QuarkTestUtil.checkSqlParsing(
        query,
        info,
        "SELECT \"PRODUCT_ID\" "
            + "FROM \"PRODUCT\" "
            + "LIMIT 100 OFFSET 10",
        redshiftDialect);
}