Browse Source

Fix `RowDocumentIterator.hasNext`.

This properly fixes the test setup, taking into account that @ActiveProfiles by default replaces any profiles set via environment variable or system property.
It turns out that the hasNext calculation in RowDocumentIterator was wrong because

a) isBeforeFirst and isAfterLast both return false when the ResultSet is empty.
b) isBeforeFirst and isAfterLast aren't necessarily implemented for all ResultSets; for example, DB2's ResultSet implementation doesn't support them by default.

Closes #1615
pull/1621/head
Jens Schauder 2 years ago committed by Mark Paluch
parent
commit
bc644aa71c
No known key found for this signature in database
GPG Key ID: 4406B84C1661DCD1
  1. 80
      spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/RowDocumentResultSetExtractor.java
  2. 10
      spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/AbstractJdbcAggregateTemplateIntegrationTests.java
  3. 2
      spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/ResultSetTestUtil.java
  4. 76
      spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/CombiningActiveProfileResolver.java
  5. 17
      spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/DataSourceConfiguration.java
  6. 29
      spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-postgres.sql

80
spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/convert/RowDocumentResultSetExtractor.java

@ -38,6 +38,7 @@ import org.springframework.util.LinkedCaseInsensitiveMap;
* {@link ResultSet}-driven extractor to extract {@link RowDocument documents}. * {@link ResultSet}-driven extractor to extract {@link RowDocument documents}.
* *
* @author Mark Paluch * @author Mark Paluch
* @author Jens Schauder
* @since 3.2 * @since 3.2
*/ */
class RowDocumentResultSetExtractor { class RowDocumentResultSetExtractor {
@ -152,18 +153,18 @@ class RowDocumentResultSetExtractor {
private final RelationalPersistentEntity<?> rootEntity; private final RelationalPersistentEntity<?> rootEntity;
private final Integer identifierIndex; private final Integer identifierIndex;
private final AggregateContext<ResultSet> aggregateContext; private final AggregateContext<ResultSet> aggregateContext;
private boolean hasNext;
/**
* Answers the question if the internal {@link ResultSet} points at an actual row. Since when not currently
* extracting a document the {@link ResultSet} points at the next row to be read (or behind all rows), this is
* equivalent to {@literal hasNext()} from the outside.
*/
private boolean pointsAtRow;
RowDocumentIterator(RelationalPersistentEntity<?> entity, ResultSet resultSet) throws SQLException { RowDocumentIterator(RelationalPersistentEntity<?> entity, ResultSet resultSet) throws SQLException {
ResultSetAdapter adapter = ResultSetAdapter.INSTANCE; ResultSetAdapter adapter = ResultSetAdapter.INSTANCE;
if (resultSet.isBeforeFirst()) {
hasNext = resultSet.next();
} else {
hasNext = !resultSet.isAfterLast();
}
this.rootPath = context.getAggregatePath(entity); this.rootPath = context.getAggregatePath(entity);
this.rootEntity = entity; this.rootEntity = entity;
@ -173,11 +174,70 @@ class RowDocumentResultSetExtractor {
this.resultSet = resultSet; this.resultSet = resultSet;
this.identifierIndex = columns.get(idColumn); this.identifierIndex = columns.get(idColumn);
pointsAtRow = pointAtInitialRow();
}
private boolean pointAtInitialRow() throws SQLException {
// If we are before the first row we need to advance to the first row.
try {
if (resultSet.isBeforeFirst()) {
return resultSet.next();
}
} catch (SQLException e) {
// seems that isBeforeFirst is not implemented
}
// if we are after the last row we are done and not pointing a valid row and also can't advance to one.
try {
if (resultSet.isAfterLast()) {
return false;
}
} catch (SQLException e) {
// seems that isAfterLast is not implemented
}
// if we arrived here we know almost nothing.
// maybe isBeforeFirst or isBeforeLast aren't implemented
// or the ResultSet is empty.
boolean peek = peek(resultSet);
if (peek) {
// we can see actual data, so we are looking at a current row.
return true;
}
try {
return resultSet.next();
} catch (SQLException e) {
// we aren't looking at a row, but we can't advance either.
// so it seems we are facing an empty ResultSet
return false;
}
}
/**
* Tries ot access values of the passed in {@link ResultSet} in order to check if it is pointing at an actual row.
*
* @param resultSet to check.
* @return true if values of the {@literal ResultSet} can be accessed and it therefore points to an actual row.
*/
private boolean peek(ResultSet resultSet) {
try {
resultSet.getObject(1);
return true;
} catch (SQLException e) {
return false;
}
} }
@Override @Override
public boolean hasNext() { public boolean hasNext() {
return hasNext; return pointsAtRow;
} }
@Override @Override
@ -197,8 +257,8 @@ class RowDocumentResultSetExtractor {
} }
reader.accept(resultSet); reader.accept(resultSet);
hasNext = resultSet.next(); pointsAtRow = resultSet.next();
} while (hasNext); } while (pointsAtRow);
} catch (SQLException e) { } catch (SQLException e) {
throw new DataRetrievalFailureException("Cannot advance ResultSet", e); throw new DataRetrievalFailureException("Cannot advance ResultSet", e);
} }

10
spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/AbstractJdbcAggregateTemplateIntegrationTests.java

@ -56,6 +56,7 @@ import org.springframework.data.domain.Sort;
import org.springframework.data.jdbc.core.convert.DataAccessStrategy; import org.springframework.data.jdbc.core.convert.DataAccessStrategy;
import org.springframework.data.jdbc.core.convert.JdbcConverter; import org.springframework.data.jdbc.core.convert.JdbcConverter;
import org.springframework.data.jdbc.testing.AssumeFeatureTestExecutionListener; import org.springframework.data.jdbc.testing.AssumeFeatureTestExecutionListener;
import org.springframework.data.jdbc.testing.CombiningActiveProfileResolver;
import org.springframework.data.jdbc.testing.EnabledOnFeature; import org.springframework.data.jdbc.testing.EnabledOnFeature;
import org.springframework.data.jdbc.testing.TestConfiguration; import org.springframework.data.jdbc.testing.TestConfiguration;
import org.springframework.data.jdbc.testing.TestDatabaseFeatures; import org.springframework.data.jdbc.testing.TestDatabaseFeatures;
@ -918,7 +919,7 @@ abstract class AbstractJdbcAggregateTemplateIntegrationTests {
assertThat( assertThat(
jdbcTemplate.queryForObject("SELECT read_only FROM with_read_only", Collections.emptyMap(), String.class)) jdbcTemplate.queryForObject("SELECT read_only FROM with_read_only", Collections.emptyMap(), String.class))
.isEqualTo("from-db"); .isEqualTo("from-db");
} }
@Test @Test
@ -1873,10 +1874,11 @@ abstract class AbstractJdbcAggregateTemplateIntegrationTests {
} }
} }
static class JdbcAggregateTemplateIntegrationTests extends AbstractJdbcAggregateTemplateIntegrationTests { } static class JdbcAggregateTemplateIntegrationTests extends AbstractJdbcAggregateTemplateIntegrationTests {}
@ActiveProfiles(PROFILE_SINGLE_QUERY_LOADING) @ActiveProfiles(value = PROFILE_SINGLE_QUERY_LOADING, resolver = CombiningActiveProfileResolver.class)
static class JdbcAggregateTemplateSingleQueryLoadingIntegrationTests extends AbstractJdbcAggregateTemplateIntegrationTests { static class JdbcAggregateTemplateSingleQueryLoadingIntegrationTests
extends AbstractJdbcAggregateTemplateIntegrationTests {
} }
} }

2
spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/convert/ResultSetTestUtil.java

@ -136,7 +136,7 @@ class ResultSetTestUtil {
} }
private boolean isBeforeFirst() { private boolean isBeforeFirst() {
return index < 0; return index < 0 && !values.isEmpty();
} }
private Object getObject(String column) throws SQLException { private Object getObject(String column) throws SQLException {

76
spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/CombiningActiveProfileResolver.java

@ -0,0 +1,76 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jdbc.testing;
import java.util.ArrayList;
import org.jetbrains.annotations.NotNull;
import org.springframework.test.context.ActiveProfilesResolver;
import org.springframework.test.context.support.DefaultActiveProfilesResolver;
/**
* A {@link ActiveProfilesResolver} combining the profile configurations from environement, system properties and
* {@link org.springframework.test.context.ActiveProfiles} annotations.
*
* @author Jens Schauder
*/
public class CombiningActiveProfileResolver implements ActiveProfilesResolver {
private static final String SPRING_PROFILES_ACTIVE = "spring.profiles.active";
private final DefaultActiveProfilesResolver defaultActiveProfilesResolver = new DefaultActiveProfilesResolver();
@Override
public String[] resolve(Class<?> testClass) {
ArrayList<Object> combinedProfiles = new ArrayList<>();
for (String profile : defaultActiveProfilesResolver.resolve(testClass)) {
combinedProfiles.add(profile);
}
for (String profile : getSystemProfiles()) {
combinedProfiles.add(profile);
}
for (String profile : getEnvironmentProfiles()) {
combinedProfiles.add(profile);
}
return combinedProfiles.toArray(new String[0]);
}
@NotNull
private static String[] getSystemProfiles() {
if (System.getProperties().containsKey(SPRING_PROFILES_ACTIVE)) {
final String profiles = System.getProperty(SPRING_PROFILES_ACTIVE);
return profiles.split("\\s*,\\s*");
}
return new String[0];
}
private String[] getEnvironmentProfiles() {
if (System.getenv().containsKey(SPRING_PROFILES_ACTIVE)) {
String profiles = System.getenv().get(SPRING_PROFILES_ACTIVE);
return profiles.split("\\s*,\\s*");
}
return new String[0];
}
}

17
spring-data-jdbc/src/test/java/org/springframework/data/jdbc/testing/DataSourceConfiguration.java

@ -18,6 +18,9 @@ package org.springframework.data.jdbc.testing;
import static org.awaitility.pollinterval.FibonacciPollInterval.*; import static org.awaitility.pollinterval.FibonacciPollInterval.*;
import java.sql.Connection; import java.sql.Connection;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import javax.sql.DataSource; import javax.sql.DataSource;
@ -64,7 +67,7 @@ abstract class DataSourceConfiguration {
initializer.setDataSource(dataSource()); initializer.setDataSource(dataSource());
String[] activeProfiles = environment.getActiveProfiles(); String[] activeProfiles = environment.getActiveProfiles();
String profile = activeProfiles.length == 0 ? "" : activeProfiles[0]; String profile = getDatabaseProfile(activeProfiles);
ClassPathResource script = new ClassPathResource(TestUtils.createScriptName(testClass, profile)); ClassPathResource script = new ClassPathResource(TestUtils.createScriptName(testClass, profile));
ResourceDatabasePopulator populator = new ResourceDatabasePopulator(script); ResourceDatabasePopulator populator = new ResourceDatabasePopulator(script);
@ -74,6 +77,18 @@ abstract class DataSourceConfiguration {
return initializer; return initializer;
} }
private static String getDatabaseProfile(String[] activeProfiles) {
List<String> validDbs = Arrays.asList("hsql", "h2", "mysql", "mariadb", "postgres", "db2", "oracle", "mssql");
for (String profile : activeProfiles) {
if (validDbs.contains(profile)) {
return profile;
}
}
return "";
}
/** /**
* Return the {@link DataSource} to be exposed as a Spring bean. * Return the {@link DataSource} to be exposed as a Spring bean.
* *

29
spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.core/JdbcAggregateTemplateIntegrationTests-postgres.sql

@ -4,14 +4,33 @@ DROP TABLE ONE_TO_ONE_PARENT;
DROP TABLE Child_No_Id; DROP TABLE Child_No_Id;
DROP TABLE element_no_id; DROP TABLE element_no_id;
DROP TABLE "LIST_PARENT"; DROP TABLE "LIST_PARENT";
DROP TABLE ARRAY_OWNER; DROP TABLE "ARRAY_OWNER";
DROP TABLE DOUBLE_LIST_OWNER;
DROP TABLE FLOAT_LIST_OWNER;
DROP TABLE BYTE_ARRAY_OWNER; DROP TABLE BYTE_ARRAY_OWNER;
DROP TABLE CHAIN4;
DROP TABLE CHAIN3;
DROP TABLE CHAIN2;
DROP TABLE CHAIN1;
DROP TABLE CHAIN0; DROP TABLE CHAIN0;
DROP TABLE CHAIN1;
DROP TABLE CHAIN2;
DROP TABLE CHAIN3;
DROP TABLE CHAIN4;
DROP TABLE NO_ID_CHAIN0;
DROP TABLE NO_ID_CHAIN1;
DROP TABLE NO_ID_CHAIN2;
DROP TABLE NO_ID_CHAIN3;
DROP TABLE NO_ID_CHAIN4;
DROP TABLE NO_ID_LIST_CHAIN0;
DROP TABLE NO_ID_LIST_CHAIN1;
DROP TABLE NO_ID_LIST_CHAIN2;
DROP TABLE NO_ID_LIST_CHAIN3;
DROP TABLE NO_ID_LIST_CHAIN4;
DROP TABLE NO_ID_MAP_CHAIN0;
DROP TABLE NO_ID_MAP_CHAIN1;
DROP TABLE NO_ID_MAP_CHAIN2;
DROP TABLE NO_ID_MAP_CHAIN3;
DROP TABLE NO_ID_MAP_CHAIN4;
DROP TABLE "VERSIONED_AGGREGATE";
DROP TABLE WITH_READ_ONLY; DROP TABLE WITH_READ_ONLY;
DROP TABLE WITH_LOCAL_DATE_TIME;
DROP TABLE WITH_ID_ONLY; DROP TABLE WITH_ID_ONLY;
DROP TABLE WITH_INSERT_ONLY; DROP TABLE WITH_INSERT_ONLY;

Loading…
Cancel
Save