Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
package org.apache.shardingsphere.infra.binder.engine.segment.dml.predicate;

import com.cedarsoftware.util.CaseInsensitiveMap.CaseInsensitiveString;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
Expand Down Expand Up @@ -55,10 +56,13 @@ public static HavingSegment bind(final HavingSegment segment, final SQLStatement
/**
 * Binds an expression inside a HAVING clause.
 *
 * @param binderContext SQL statement binder context
 * @param currentTableBinderContexts table binder contexts of the current (projection-derived) scope
 * @param tableBinderContexts table binder contexts of the FROM clause
 * @param outerTableBinderContexts table binder contexts of enclosing (outer) queries
 * @param expressionSegment expression segment to bind
 * @return bound expression segment
 */
private static ExpressionSegment bind(final SQLStatementBinderContext binderContext, final Multimap<CaseInsensitiveString, TableSegmentBinderContext> currentTableBinderContexts,
                                      final Multimap<CaseInsensitiveString, TableSegmentBinderContext> tableBinderContexts,
                                      final Multimap<CaseInsensitiveString, TableSegmentBinderContext> outerTableBinderContexts, final ExpressionSegment expressionSegment) {
    // Merge the outer contexts with this statement's own table contexts so that a
    // correlated subquery nested in the HAVING expression can resolve columns that
    // belong to the outer query (e.g. "WHERE ref_0.user_id = ref_2.user_id").
    Multimap<CaseInsensitiveString, TableSegmentBinderContext> newOuterTableBinderContexts = LinkedHashMultimap.create();
    newOuterTableBinderContexts.putAll(outerTableBinderContexts);
    newOuterTableBinderContexts.putAll(tableBinderContexts);
    try {
        return ExpressionSegmentBinder.bind(expressionSegment, SegmentType.HAVING, binderContext, currentTableBinderContexts, newOuterTableBinderContexts);
    } catch (final ColumnNotFoundException ignored) {
        // The column was not visible via the current scope's contexts (e.g. it is not a
        // projection alias); retry resolution against the FROM-clause table contexts.
        return ExpressionSegmentBinder.bind(expressionSegment, SegmentType.HAVING, binderContext, tableBinderContexts, newOuterTableBinderContexts);
    }
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.shardingsphere.infra.binder.engine.segment.dml.predicate;

import com.cedarsoftware.util.CaseInsensitiveMap.CaseInsensitiveString;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import org.apache.shardingsphere.infra.binder.engine.segment.dml.from.context.TableSegmentBinderContext;
import org.apache.shardingsphere.infra.binder.engine.statement.SQLStatementBinderContext;
import org.apache.shardingsphere.sql.parser.statement.core.segment.dml.expr.ExpressionSegment;
import org.apache.shardingsphere.sql.parser.statement.core.segment.dml.predicate.HavingSegment;
import org.junit.jupiter.api.Test;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.mock;

class HavingSegmentBinderTest {
    
    @Test
    void assertBind() {
        // Empty binder contexts are sufficient here: the binder is expected to rebuild
        // the segment with unchanged start/stop indexes and the same (mocked) expression.
        Multimap<CaseInsensitiveString, TableSegmentBinderContext> currentContexts = LinkedHashMultimap.create();
        Multimap<CaseInsensitiveString, TableSegmentBinderContext> fromContexts = LinkedHashMultimap.create();
        Multimap<CaseInsensitiveString, TableSegmentBinderContext> outerContexts = LinkedHashMultimap.create();
        SQLStatementBinderContext binderContext = mock(SQLStatementBinderContext.class);
        HavingSegment expected = new HavingSegment(1, 2, mock(ExpressionSegment.class));
        HavingSegment actual = HavingSegmentBinder.bind(expected, binderContext, currentContexts, fromContexts, outerContexts);
        assertThat(actual.getStartIndex(), is(expected.getStartIndex()));
        assertThat(actual.getStopIndex(), is(expected.getStopIndex()));
        assertThat(actual.getExpr(), is(expected.getExpr()));
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.shardingsphere.test.it.sql.binder;

import org.apache.shardingsphere.database.connector.core.type.DatabaseType;
import org.apache.shardingsphere.database.connector.core.type.DatabaseTypeRegistry;
import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext;
import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine;
import org.apache.shardingsphere.infra.config.props.ConfigurationProperties;
import org.apache.shardingsphere.infra.hint.HintValueContext;
import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData;
import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase;
import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData;
import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData;
import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn;
import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema;
import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable;
import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader;
import org.apache.shardingsphere.sql.parser.engine.api.CacheOption;
import org.apache.shardingsphere.sql.parser.engine.api.SQLParserEngine;
import org.apache.shardingsphere.sql.parser.engine.api.SQLStatementVisitorEngine;
import org.apache.shardingsphere.sql.parser.statement.core.statement.SQLStatement;
import org.junit.jupiter.api.Test;

import java.sql.Types;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.Properties;

import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.mockito.Mockito.mock;

/**
 * Integration test verifying that HAVING clauses containing correlated subqueries which
 * reference outer-query columns bind without throwing, across several database dialects.
 */
class HavingCorrelatedSubqueryBinderIT {
    
    // Correlated subquery comparing an outer-query column (ref_0.user_id) with an inner
    // column by equality; shared by the PostgreSQL, MySQL and openGauss test cases.
    private static final String CORRELATED_EQUALITY_SQL = "SELECT ref_0.user_id AS c_0 FROM t_order AS ref_0 GROUP BY ref_0.user_id "
            + "HAVING 'a' = (SELECT ref_2.status AS c_0 FROM t_order_item AS ref_2 WHERE ref_0.user_id = ref_2.user_id LIMIT 1)";
    
    // Correlated subquery referencing the outer-query column inside a BETWEEN predicate.
    private static final String CORRELATED_BETWEEN_SQL = "SELECT ref_0.user_id AS c_0 FROM t_order AS ref_0 GROUP BY ref_0.user_id "
            + "HAVING 'a' = (SELECT ref_2.status AS c_0 FROM t_order_item AS ref_2 WHERE ref_0.user_id BETWEEN ref_2.user_id AND ref_2.user_id LIMIT 1)";
    
    @Test
    void assertBindHavingWithCorrelatedSubqueryReferencingOuterColumnPostgreSQL() {
        assertDoesNotThrow(() -> bindSQL("PostgreSQL", CORRELATED_EQUALITY_SQL));
    }
    
    @Test
    void assertBindHavingWithCorrelatedSubqueryUsingBetweenPostgreSQL() {
        assertDoesNotThrow(() -> bindSQL("PostgreSQL", CORRELATED_BETWEEN_SQL));
    }
    
    @Test
    void assertBindHavingWithCorrelatedSubqueryReferencingOuterColumnMySQL() {
        assertDoesNotThrow(() -> bindSQL("MySQL", CORRELATED_EQUALITY_SQL));
    }
    
    @Test
    void assertBindHavingWithCorrelatedSubqueryReferencingOuterColumnOpenGauss() {
        assertDoesNotThrow(() -> bindSQL("openGauss", CORRELATED_EQUALITY_SQL));
    }
    
    /**
     * Parses and binds the given SQL against mocked metadata for {@code foo_db}.
     *
     * @param databaseTypeName database type SPI name (e.g. "PostgreSQL")
     * @param sql SQL text to parse and bind
     * @return bound SQL statement context (asserted non-null)
     */
    private SQLStatementContext bindSQL(final String databaseTypeName, final String sql) {
        DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, databaseTypeName);
        SQLStatement sqlStatement = new SQLStatementVisitorEngine(databaseTypeName).visit(new SQLParserEngine(databaseTypeName, new CacheOption(128, 1024L)).parse(sql, false));
        SQLStatementContext result = new SQLBindEngine(mockMetaData(databaseType), "foo_db", new HintValueContext()).bind(sqlStatement);
        assertThat(result, notNullValue());
        return result;
    }
    
    // Builds metadata containing a single database "foo_db" backed by the mocked schemas.
    private ShardingSphereMetaData mockMetaData(final DatabaseType databaseType) {
        Collection<ShardingSphereDatabase> databases = new LinkedList<>();
        databases.add(new ShardingSphereDatabase("foo_db", databaseType, mock(ResourceMetaData.class), mock(RuleMetaData.class), mockSchemas(databaseType)));
        return new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), mock(RuleMetaData.class), new ConfigurationProperties(new Properties()));
    }
    
    // Builds the dialect-appropriate default schema holding the mocked tables.
    private Collection<ShardingSphereSchema> mockSchemas(final DatabaseType databaseType) {
        Collection<ShardingSphereSchema> result = new LinkedList<>();
        String defaultSchemaName = new DatabaseTypeRegistry(databaseType).getDefaultSchemaName("foo_db");
        result.add(new ShardingSphereSchema(defaultSchemaName, mockTables(), Collections.emptyList(), databaseType));
        return result;
    }
    
    // Builds the t_order / t_order_item tables referenced by the test SQL.
    private Collection<ShardingSphereTable> mockTables() {
        Collection<ShardingSphereTable> result = new LinkedList<>();
        result.add(new ShardingSphereTable("t_order", Arrays.asList(
                new ShardingSphereColumn("order_id", Types.BIGINT, true, false, false, true, false, false),
                new ShardingSphereColumn("user_id", Types.INTEGER, false, false, false, true, false, false),
                new ShardingSphereColumn("status", Types.VARCHAR, false, false, false, true, false, false)),
                Collections.emptyList(), Collections.emptyList()));
        result.add(new ShardingSphereTable("t_order_item", Arrays.asList(
                new ShardingSphereColumn("item_id", Types.BIGINT, true, false, false, true, false, false),
                new ShardingSphereColumn("order_id", Types.BIGINT, false, false, false, true, false, false),
                new ShardingSphereColumn("user_id", Types.INTEGER, false, false, false, true, false, false),
                new ShardingSphereColumn("status", Types.VARCHAR, false, false, false, true, false, false)),
                Collections.emptyList(), Collections.emptyList()));
        return result;
    }
}