Browse Source

Polishing.

Reformat code, switch to tabs. Accept property in DatabaseTypeMapping to provide more context to the type mapping component.

Rename LiquibaseChangeSetGenerator to …Writer as we're writing a changeset and computing the contents is a consequence of writing a changeset. Refine naming to express what we're actually doing.

Introduce setters for enhanced configuration of predicates. Reduce visibility of types to avoid unwanted public API where public access is not needed.

Remove unused code, move methods around for improved grouping of code.

Rename package to schema as the schema is being created and updated and not generated. Rename …Model classes to just their name as types are package-private and not visible externally. Refactor SchemaDiff to Java record.

Use different overloads to write schema changes to avoid LiquibaseException leaking into cases where no diff is being used. Introduce SchemaFilter to filter unwanted mapped entities.

Move code to JDBC module. Introduce comparator strategy to customize how table and column names are compared.

See #756
Original pull request: #1520
pull/1526/head
Mark Paluch 3 years ago
parent
commit
b2950bf133
No known key found for this signature in database
GPG Key ID: 4406B84C1661DCD1
  1. 1
      spring-data-jdbc/pom.xml
  2. 15
      spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Column.java
  3. 66
      spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/DefaultSqlTypeMapping.java
  4. 596
      spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriter.java
  5. 147
      spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiff.java
  6. 101
      spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMapping.java
  7. 65
      spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Table.java
  8. 21
      spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/TableDiff.java
  9. 79
      spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Tables.java
  10. 7
      spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/package-info.java
  11. 249
      spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterIntegrationTests.java
  12. 91
      spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterUnitTests.java
  13. 90
      spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiffUnitTests.java
  14. 68
      spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMappingUnitTests.java
  15. 16
      spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/changelog.yml
  16. 5
      spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/person-with-id-and-name.sql
  17. 4
      spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/unused-table.sql
  18. 3
      spring-data-relational/pom.xml
  19. 2
      spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/RelationalMappingContext.java
  20. 47
      spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/DefaultDatabaseTypeMapping.java
  21. 351
      spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/LiquibaseChangeSetGenerator.java
  22. 99
      spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/SchemaDiff.java
  23. 83
      spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/SchemaModel.java
  24. 21
      spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/TableDiff.java
  25. 69
      spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/TableModel.java
  26. 98
      spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/SchemaModelTests.java

1
spring-data-jdbc/pom.xml

@ -226,7 +226,6 @@ @@ -226,7 +226,6 @@
<groupId>org.liquibase</groupId>
<artifactId>liquibase-core</artifactId>
<version>${liquibase.version}</version>
<scope>compile</scope>
<optional>true</optional>
</dependency>

15
spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/ColumnModel.java → spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Column.java

@ -13,30 +13,27 @@ @@ -13,30 +13,27 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.relational.core.mapping.schemasqlgeneration;
import org.springframework.data.relational.core.sql.SqlIdentifier;
package org.springframework.data.jdbc.core.mapping.schema;
import java.util.Objects;
/**
* Models a Column for generating SQL for Schema generation.
*
* @author Kurt Niemi
* @since 3.2
*/
public record ColumnModel(String name, String type, boolean nullable, boolean identityColumn) {
record Column(String name, String type, boolean nullable, boolean identity) {
public ColumnModel(String name, String type) {
this(name, type, false, false);
}
public Column(String name, String type) {
this(name, type, false, false);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ColumnModel that = (ColumnModel) o;
Column that = (Column) o;
return Objects.equals(name, that.name);
}

66
spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/DefaultSqlTypeMapping.java

@ -0,0 +1,66 @@ @@ -0,0 +1,66 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jdbc.core.mapping.schema;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZonedDateTime;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import org.springframework.data.relational.core.mapping.RelationalPersistentProperty;
import org.springframework.util.ClassUtils;
/**
* Class that provides a default implementation of mapping Java type to a Database type. To customize the mapping an
* instance of a class implementing {@link SqlTypeMapping} interface can be set on the {@link Tables} class
*
* @author Kurt Niemi
* @since 3.2
*/
public class DefaultSqlTypeMapping implements SqlTypeMapping {

	// Program to the Map interface; the concrete HashMap is an implementation detail.
	private final Map<Class<?>, String> typeMap = new HashMap<>();

	/**
	 * Creates a new {@code DefaultSqlTypeMapping} pre-populated with mappings for common JDK types: {@link String},
	 * boxed numeric types, {@link UUID} and {@code java.time} temporal types.
	 */
	public DefaultSqlTypeMapping() {

		typeMap.put(String.class, "VARCHAR(255 BYTE)");
		typeMap.put(Boolean.class, "TINYINT");
		typeMap.put(Double.class, "DOUBLE");
		typeMap.put(Float.class, "FLOAT");
		typeMap.put(Integer.class, "INT");
		typeMap.put(Long.class, "BIGINT");
		typeMap.put(BigInteger.class, "BIGINT");
		typeMap.put(BigDecimal.class, "NUMERIC");
		typeMap.put(UUID.class, "UUID");
		typeMap.put(LocalDate.class, "DATE");
		typeMap.put(LocalTime.class, "TIME");
		typeMap.put(LocalDateTime.class, "TIMESTAMP");
		typeMap.put(ZonedDateTime.class, "TIMESTAMPTZ");
	}

	/**
	 * Resolves the SQL column type for the property's actual Java type, unwrapping primitives to their wrapper types
	 * before the lookup.
	 *
	 * @param property the property whose column type is requested.
	 * @return the mapped SQL type, or {@code null} if the property's Java type has no registered mapping.
	 */
	@Override
	public String getColumnType(RelationalPersistentProperty property) {
		return typeMap.get(ClassUtils.resolvePrimitiveIfNecessary(property.getActualType()));
	}
}

596
spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriter.java

@ -0,0 +1,596 @@ @@ -0,0 +1,596 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jdbc.core.mapping.schema;
import liquibase.CatalogAndSchema;
import liquibase.change.AddColumnConfig;
import liquibase.change.ColumnConfig;
import liquibase.change.ConstraintsConfig;
import liquibase.change.core.AddColumnChange;
import liquibase.change.core.CreateTableChange;
import liquibase.change.core.DropColumnChange;
import liquibase.change.core.DropTableChange;
import liquibase.changelog.ChangeLogChild;
import liquibase.changelog.ChangeLogParameters;
import liquibase.changelog.ChangeSet;
import liquibase.changelog.DatabaseChangeLog;
import liquibase.database.Database;
import liquibase.exception.ChangeLogParseException;
import liquibase.exception.LiquibaseException;
import liquibase.parser.ChangeLogParser;
import liquibase.parser.core.yaml.YamlChangeLogParser;
import liquibase.resource.DirectoryResourceAccessor;
import liquibase.serializer.ChangeLogSerializer;
import liquibase.serializer.core.yaml.YamlChangeLogSerializer;
import liquibase.snapshot.DatabaseSnapshot;
import liquibase.snapshot.SnapshotControl;
import liquibase.snapshot.SnapshotGeneratorFactory;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.function.BiPredicate;
import java.util.function.Predicate;
import org.springframework.core.io.Resource;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.relational.core.mapping.RelationalPersistentEntity;
import org.springframework.data.relational.core.mapping.RelationalPersistentProperty;
import org.springframework.data.util.Predicates;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
/**
* Use this class to write Liquibase ChangeSets.
* <p>
* This writer uses {@link MappingContext} as input to determine mapped entities. Entities can be filtered through a
* {@link #setSchemaFilter(Predicate) schema filter} to include/exclude entities. By default, all entities within the
* mapping context are considered for computing the expected schema.
* <p>
* This writer operates in two modes:
* <ul>
* <li>Initial Schema Creation</li>
* <li>Differential Schema Change Creation</li>
* </ul>
* The {@link #writeChangeSet(Resource) initial mode} allows creating the full schema without considering any existing
* tables. The {@link #writeChangeSet(Resource, Database) differential schema mode} uses a {@link Database} object to
* determine existing tables and columns. It creates in addition to table creations also changes to drop tables, drop
* columns and add columns. By default, the {@link #setDropTableFilter(Predicate) DROP TABLE} and the
* {@link #setDropColumnFilter(BiPredicate) DROP COLUMN} filters exclude all tables respective columns from being
* dropped.
* <p>
* In differential schema mode, table and column names are compared using a case-insensitive comparator, see
* {@link Collator#PRIMARY}.
* <p>
* The writer can be configured to use specific ChangeLogSerializers and ChangeLogParsers defaulting to YAML.
*
* @author Kurt Niemi
* @author Mark Paluch
* @since 3.2
*/
public class LiquibaseChangeSetWriter {

	public static final String DEFAULT_AUTHOR = "Spring Data Relational";

	private final MappingContext<? extends RelationalPersistentEntity<?>, ? extends RelationalPersistentProperty> mappingContext;

	private SqlTypeMapping sqlTypeMapping = new DefaultSqlTypeMapping();

	private ChangeLogSerializer changeLogSerializer = new YamlChangeLogSerializer();

	private ChangeLogParser changeLogParser = new YamlChangeLogParser();

	/**
	 * Predicate to identify Liquibase system tables ({@code DATABASECHANGELOG}/{@code DATABASECHANGELOGLOCK}) that must
	 * be excluded from schema comparison.
	 */
	private final Predicate<String> isLiquibaseTable = table -> table.toUpperCase(Locale.ROOT)
			.startsWith("DATABASECHANGELOG");

	/**
	 * Comparator to compare table and column names. Case-insensitive, using {@link Collator#PRIMARY} strength.
	 */
	private final Comparator<String> nameComparator = createComparator();

	private static Comparator<String> createComparator() {

		Collator instance = Collator.getInstance(Locale.ROOT);
		instance.setStrength(Collator.PRIMARY);

		return instance::compare;
	}

	/**
	 * Filter predicate to determine which persistent entities should be used for schema generation.
	 */
	private Predicate<RelationalPersistentEntity<?>> schemaFilter = Predicates.isTrue();

	/**
	 * Filter predicate used to determine whether an existing table should be removed. Defaults to {@code false} to keep
	 * existing tables.
	 */
	private Predicate<String> dropTableFilter = Predicates.isFalse();

	/**
	 * Filter predicate used to determine whether an existing column should be removed. Defaults to {@code false} to keep
	 * existing columns.
	 */
	private BiPredicate<String, String> dropColumnFilter = (table, column) -> false;

	/**
	 * Use this to generate a ChangeSet that can be used on an empty database.
	 *
	 * @param mappingContext source to determine persistent entities, must not be {@literal null}.
	 */
	public LiquibaseChangeSetWriter(
			MappingContext<? extends RelationalPersistentEntity<?>, ? extends RelationalPersistentProperty> mappingContext) {

		Assert.notNull(mappingContext, "MappingContext must not be null");

		this.mappingContext = mappingContext;
	}

	/**
	 * Configure SQL type mapping. Defaults to {@link DefaultSqlTypeMapping}.
	 *
	 * @param sqlTypeMapping must not be {@literal null}.
	 */
	public void setSqlTypeMapping(SqlTypeMapping sqlTypeMapping) {

		Assert.notNull(sqlTypeMapping, "SqlTypeMapping must not be null");

		this.sqlTypeMapping = sqlTypeMapping;
	}

	/**
	 * Set the {@link ChangeLogSerializer}.
	 *
	 * @param changeLogSerializer must not be {@literal null}.
	 */
	public void setChangeLogSerializer(ChangeLogSerializer changeLogSerializer) {

		Assert.notNull(changeLogSerializer, "ChangeLogSerializer must not be null");

		this.changeLogSerializer = changeLogSerializer;
	}

	/**
	 * Set the {@link ChangeLogParser}.
	 *
	 * @param changeLogParser must not be {@literal null}.
	 */
	public void setChangeLogParser(ChangeLogParser changeLogParser) {

		Assert.notNull(changeLogParser, "ChangeLogParser must not be null");

		this.changeLogParser = changeLogParser;
	}

	/**
	 * Set the filter predicate to identify for which entities to create schema definitions. Existing tables for excluded
	 * entities will show up in {@link #setDropTableFilter(Predicate)}. Returning {@code true} includes the entity;
	 * {@code false} excludes the entity from schema creation.
	 *
	 * @param schemaFilter must not be {@literal null}.
	 */
	public void setSchemaFilter(Predicate<RelationalPersistentEntity<?>> schemaFilter) {

		Assert.notNull(schemaFilter, "Schema filter must not be null");

		this.schemaFilter = schemaFilter;
	}

	/**
	 * Set the filter predicate to identify tables to drop. The predicate accepts the table name. Returning {@code true}
	 * will delete the table; {@code false} retains the table.
	 *
	 * @param dropTableFilter must not be {@literal null}.
	 */
	public void setDropTableFilter(Predicate<String> dropTableFilter) {

		// Fixed copy-paste error: message previously referred to the drop-column filter.
		Assert.notNull(dropTableFilter, "Drop Table filter must not be null");

		this.dropTableFilter = dropTableFilter;
	}

	/**
	 * Set the filter predicate to identify columns within a table to drop. The predicate accepts the table- and column
	 * name. Returning {@code true} will delete the column; {@code false} retains the column.
	 *
	 * @param dropColumnFilter must not be {@literal null}.
	 */
	public void setDropColumnFilter(BiPredicate<String, String> dropColumnFilter) {

		Assert.notNull(dropColumnFilter, "Drop Column filter must not be null");

		this.dropColumnFilter = dropColumnFilter;
	}

	/**
	 * Write a Liquibase ChangeSet containing all tables as initial ChangeSet.
	 *
	 * @param changeLogResource resource that ChangeSet will be written to (or append to an existing ChangeSet file). The
	 *          resource must resolve to a valid {@link Resource#getFile()}.
	 * @throws IOException in case of I/O errors.
	 */
	public void writeChangeSet(Resource changeLogResource) throws IOException {
		writeChangeSet(changeLogResource, ChangeSetMetadata.create());
	}

	/**
	 * Write a Liquibase ChangeSet using a {@link Database} to identify the differences between mapped entities and the
	 * existing database.
	 *
	 * @param changeLogResource resource that ChangeSet will be written to (or append to an existing ChangeSet file). The
	 *          resource must resolve to a valid {@link Resource#getFile()}.
	 * @param database database to identify the differences.
	 * @throws LiquibaseException in case of errors when interacting with Liquibase, e.g. while snapshotting the database.
	 * @throws IOException in case of I/O errors.
	 */
	public void writeChangeSet(Resource changeLogResource, Database database) throws IOException, LiquibaseException {
		writeChangeSet(changeLogResource, ChangeSetMetadata.create(), database);
	}

	/**
	 * Write a Liquibase ChangeSet containing all tables as initial ChangeSet.
	 *
	 * @param changeLogResource resource that ChangeSet will be written to (or append to an existing ChangeSet file).
	 * @param metadata the ChangeSet metadata.
	 * @throws IOException in case of I/O errors.
	 */
	public void writeChangeSet(Resource changeLogResource, ChangeSetMetadata metadata) throws IOException {

		DatabaseChangeLog databaseChangeLog = getDatabaseChangeLog(changeLogResource.getFile(), null);
		ChangeSet changeSet = createChangeSet(metadata, databaseChangeLog);

		writeChangeSet(databaseChangeLog, changeSet, changeLogResource.getFile());
	}

	/**
	 * Write a Liquibase ChangeSet using a {@link Database} to identify the differences between mapped entities and the
	 * existing database.
	 *
	 * @param changeLogResource resource that ChangeSet will be written to (or append to an existing ChangeSet file).
	 * @param metadata the ChangeSet metadata.
	 * @param database database to identify the differences.
	 * @throws LiquibaseException in case of errors when interacting with Liquibase, e.g. while snapshotting the database.
	 * @throws IOException in case of I/O errors.
	 */
	public void writeChangeSet(Resource changeLogResource, ChangeSetMetadata metadata, Database database)
			throws LiquibaseException, IOException {

		DatabaseChangeLog databaseChangeLog = getDatabaseChangeLog(changeLogResource.getFile(), database);
		ChangeSet changeSet = createChangeSet(metadata, database, databaseChangeLog);

		writeChangeSet(databaseChangeLog, changeSet, changeLogResource.getFile());
	}

	/**
	 * Creates an initial ChangeSet.
	 *
	 * @param metadata must not be {@literal null}.
	 * @param databaseChangeLog must not be {@literal null}.
	 * @return the initial ChangeSet.
	 */
	protected ChangeSet createChangeSet(ChangeSetMetadata metadata, DatabaseChangeLog databaseChangeLog) {
		return createChangeSet(metadata, initial(), databaseChangeLog);
	}

	/**
	 * Creates a diff ChangeSet by comparing {@link Database} with {@link MappingContext mapped entities}.
	 *
	 * @param metadata must not be {@literal null}.
	 * @param databaseChangeLog must not be {@literal null}.
	 * @return the diff ChangeSet.
	 * @throws LiquibaseException in case of errors when snapshotting the database.
	 */
	protected ChangeSet createChangeSet(ChangeSetMetadata metadata, Database database,
			DatabaseChangeLog databaseChangeLog) throws LiquibaseException {
		return createChangeSet(metadata, differenceOf(database), databaseChangeLog);
	}

	private ChangeSet createChangeSet(ChangeSetMetadata metadata, SchemaDiff difference,
			DatabaseChangeLog databaseChangeLog) {

		ChangeSet changeSet = new ChangeSet(metadata.getId(), metadata.getAuthor(), false, false, "", "", "",
				databaseChangeLog);

		generateTableAdditionsDeletions(changeSet, difference);
		generateTableModifications(changeSet, difference);

		return changeSet;
	}

	private SchemaDiff initial() {

		// Diff against an empty set of tables yields pure table creations.
		Tables mappedEntities = Tables.from(mappingContext.getPersistentEntities().stream().filter(schemaFilter),
				sqlTypeMapping, null);
		return SchemaDiff.diff(mappedEntities, Tables.empty(), nameComparator);
	}

	private SchemaDiff differenceOf(Database database) throws LiquibaseException {

		Tables existingTables = getLiquibaseModel(database);
		Tables mappedEntities = Tables.from(mappingContext.getPersistentEntities().stream().filter(schemaFilter),
				sqlTypeMapping, database.getDefaultCatalogName());

		return SchemaDiff.diff(mappedEntities, existingTables, nameComparator);
	}

	private DatabaseChangeLog getDatabaseChangeLog(File changeLogFile, @Nullable Database database) throws IOException {

		ChangeLogParameters parameters = database != null ? new ChangeLogParameters(database) : new ChangeLogParameters();

		if (!changeLogFile.exists()) {

			DatabaseChangeLog databaseChangeLog = new DatabaseChangeLog(changeLogFile.getName());
			if (database != null) {
				databaseChangeLog.setChangeLogParameters(parameters);
			}
			return databaseChangeLog;
		}

		try {

			File parentDirectory = changeLogFile.getParentFile();
			if (parentDirectory == null) {
				parentDirectory = new File("./");
			}

			DirectoryResourceAccessor resourceAccessor = new DirectoryResourceAccessor(parentDirectory);
			return changeLogParser.parse(changeLogFile.getName(), parameters, resourceAccessor);
		} catch (ChangeLogParseException ex) {
			// Surface parse failures through the declared IOException contract, preserving the cause.
			throw new IOException(ex);
		}
	}

	private void generateTableAdditionsDeletions(ChangeSet changeSet, SchemaDiff difference) {

		for (Table table : difference.tableAdditions()) {
			CreateTableChange newTable = changeTable(table);
			changeSet.addChange(newTable);
		}

		for (Table table : difference.tableDeletions()) {
			// Do not delete/drop table if it is an external application table
			if (dropTableFilter.test(table.name())) {
				changeSet.addChange(dropTable(table));
			}
		}
	}

	private void generateTableModifications(ChangeSet changeSet, SchemaDiff difference) {

		for (TableDiff table : difference.tableDiffs()) {

			if (!table.columnsToAdd().isEmpty()) {
				changeSet.addChange(addColumns(table));
			}

			List<Column> deletedColumns = getColumnsToDrop(table);

			if (!deletedColumns.isEmpty()) {
				changeSet.addChange(dropColumns(table, deletedColumns));
			}
		}
	}

	private List<Column> getColumnsToDrop(TableDiff table) {

		List<Column> deletedColumns = new ArrayList<>();
		for (Column column : table.columnsToDrop()) {

			if (dropColumnFilter.test(table.table().name(), column.name())) {
				deletedColumns.add(column);
			}
		}
		return deletedColumns;
	}

	private void writeChangeSet(DatabaseChangeLog databaseChangeLog, ChangeSet changeSet, File changeLogFile)
			throws IOException {

		// Re-serialize existing ChangeSets plus the new one to append to an existing file.
		List<ChangeLogChild> changes = new ArrayList<>(databaseChangeLog.getChangeSets());
		changes.add(changeSet);

		try (FileOutputStream fos = new FileOutputStream(changeLogFile)) {
			changeLogSerializer.write(changes, fos);
		}
	}

	private Tables getLiquibaseModel(Database targetDatabase) throws LiquibaseException {

		CatalogAndSchema[] schemas = new CatalogAndSchema[] { targetDatabase.getDefaultSchema() };
		SnapshotControl snapshotControl = new SnapshotControl(targetDatabase);

		DatabaseSnapshot snapshot = SnapshotGeneratorFactory.getInstance().createSnapshot(schemas, targetDatabase,
				snapshotControl);
		Set<liquibase.structure.core.Table> tables = snapshot.get(liquibase.structure.core.Table.class);
		List<Table> existingTables = new ArrayList<>(tables.size());

		for (liquibase.structure.core.Table table : tables) {

			// Exclude internal Liquibase tables from comparison
			if (isLiquibaseTable.test(table.getName())) {
				continue;
			}

			Table tableModel = new Table(table.getSchema().getCatalogName(), table.getName());

			List<liquibase.structure.core.Column> columns = table.getColumns();

			for (liquibase.structure.core.Column column : columns) {

				String type = column.getType().toString();
				boolean nullable = column.isNullable();
				Column columnModel = new Column(column.getName(), type, nullable, false);

				tableModel.columns().add(columnModel);
			}

			existingTables.add(tableModel);
		}

		return new Tables(existingTables);
	}

	private static AddColumnChange addColumns(TableDiff table) {

		AddColumnChange addColumnChange = new AddColumnChange();
		addColumnChange.setSchemaName(table.table().schema());
		addColumnChange.setTableName(table.table().name());

		for (Column column : table.columnsToAdd()) {
			AddColumnConfig addColumn = createAddColumnChange(column);
			addColumnChange.addColumn(addColumn);
		}

		return addColumnChange;
	}

	private static AddColumnConfig createAddColumnChange(Column column) {

		AddColumnConfig config = new AddColumnConfig();
		config.setName(column.name());
		config.setType(column.type());

		if (column.identity()) {
			config.setAutoIncrement(true);
		}

		return config;
	}

	private static DropColumnChange dropColumns(TableDiff table, Collection<Column> deletedColumns) {

		DropColumnChange dropColumnChange = new DropColumnChange();
		dropColumnChange.setSchemaName(table.table().schema());
		dropColumnChange.setTableName(table.table().name());

		List<ColumnConfig> dropColumns = new ArrayList<>();
		for (Column column : deletedColumns) {

			ColumnConfig config = new ColumnConfig();
			config.setName(column.name());
			dropColumns.add(config);
		}

		dropColumnChange.setColumns(dropColumns);

		return dropColumnChange;
	}

	private static CreateTableChange changeTable(Table table) {

		CreateTableChange change = new CreateTableChange();
		change.setSchemaName(table.schema());
		change.setTableName(table.name());

		for (Column column : table.columns()) {

			ColumnConfig columnConfig = new ColumnConfig();
			columnConfig.setName(column.name());
			columnConfig.setType(column.type());

			ConstraintsConfig constraints = new ConstraintsConfig();
			constraints.setNullable(column.nullable());

			// Identity columns double as auto-incremented primary keys.
			if (column.identity()) {
				columnConfig.setAutoIncrement(true);
				constraints.setPrimaryKey(true);
			}

			columnConfig.setConstraints(constraints);
			change.addColumn(columnConfig);
		}

		return change;
	}

	private static DropTableChange dropTable(Table table) {

		DropTableChange change = new DropTableChange();
		change.setSchemaName(table.schema());
		change.setTableName(table.name());
		change.setCascadeConstraints(true);

		return change;
	}

	/**
	 * Metadata for a ChangeSet.
	 */
	interface ChangeSetMetadata {

		/**
		 * Creates a new default {@link ChangeSetMetadata} using the {@link #DEFAULT_AUTHOR default author}.
		 *
		 * @return a new default {@link ChangeSetMetadata} using the {@link #DEFAULT_AUTHOR default author}.
		 */
		static ChangeSetMetadata create() {
			return ofAuthor(LiquibaseChangeSetWriter.DEFAULT_AUTHOR);
		}

		/**
		 * Creates a new default {@link ChangeSetMetadata} using a generated {@code identifier} and provided {@code author}.
		 *
		 * @return a new default {@link ChangeSetMetadata} using a generated {@code identifier} and provided {@code author}.
		 */
		static ChangeSetMetadata ofAuthor(String author) {
			return of(Long.toString(System.currentTimeMillis()), author);
		}

		/**
		 * Creates a new default {@link ChangeSetMetadata} using the provided {@code identifier} and {@code author}.
		 *
		 * @return a new default {@link ChangeSetMetadata} using the provided {@code identifier} and {@code author}.
		 */
		static ChangeSetMetadata of(String identifier, String author) {
			return new DefaultChangeSetMetadata(identifier, author);
		}

		/**
		 * @return the ChangeSet identifier.
		 */
		String getId();

		/**
		 * @return the ChangeSet author.
		 */
		String getAuthor();
	}

	private record DefaultChangeSetMetadata(String id, String author) implements ChangeSetMetadata {

		private DefaultChangeSetMetadata {
			Assert.hasText(id, "ChangeSet identifier must not be empty or null");
			Assert.hasText(author, "Author must not be empty or null");
		}

		@Override
		public String getId() {
			return id();
		}

		@Override
		public String getAuthor() {
			return author();
		}
	}
}

147
spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiff.java

@ -0,0 +1,147 @@ @@ -0,0 +1,147 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jdbc.core.mapping.schema;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.function.Function;
import java.util.function.Predicate;
/**
* This class is created to return the difference between a source and target {@link Tables} The difference consists of
* Table Additions, Deletions, and Modified Tables (i.e. table exists in both source and target - but has columns to add
* or delete)
*
* @author Kurt Niemi
* @since 3.2
*/
record SchemaDiff(List<Table> tableAdditions, List<Table> tableDeletions, List<TableDiff> tableDiffs) {
public static SchemaDiff diff(Tables mappedEntities, Tables existingTables, Comparator<String> nameComparator) {
Map<String, Table> existingIndex = createMapping(existingTables.tables(), SchemaDiff::getKey, nameComparator);
Map<String, Table> mappedIndex = createMapping(mappedEntities.tables(), SchemaDiff::getKey, nameComparator);
List<Table> toCreate = getTablesToCreate(mappedEntities, withTableKey(existingIndex::containsKey));
List<Table> toDrop = getTablesToDrop(existingTables, withTableKey(mappedIndex::containsKey));
List<TableDiff> tableDiffs = diffTable(mappedEntities, existingIndex, withTableKey(existingIndex::containsKey),
nameComparator);
return new SchemaDiff(toCreate, toDrop, tableDiffs);
}
private static List<Table> getTablesToCreate(Tables mappedEntities, Predicate<Table> excludeTable) {
List<Table> toCreate = new ArrayList<>(mappedEntities.tables().size());
for (Table table : mappedEntities.tables()) {
if (!excludeTable.test(table)) {
toCreate.add(table);
}
}
return toCreate;
}
private static List<Table> getTablesToDrop(Tables existingTables, Predicate<Table> excludeTable) {
List<Table> toDrop = new ArrayList<>(existingTables.tables().size());
for (Table table : existingTables.tables()) {
if (!excludeTable.test(table)) {
toDrop.add(table);
}
}
return toDrop;
}
/**
 * Compute a column-level diff for each mapped table that also exists in the database.
 *
 * @param mappedEntities tables derived from the entity model (the desired state).
 * @param existingIndex existing database tables indexed by their schema-qualified key.
 * @param includeTable predicate selecting tables present on both sides.
 * @param nameComparator comparator used to match table/column names (e.g. case-insensitive).
 * @return one {@link TableDiff} per table present on both sides.
 */
private static List<TableDiff> diffTable(Tables mappedEntities, Map<String, Table> existingIndex,
		Predicate<Table> includeTable, Comparator<String> nameComparator) {

	List<TableDiff> tableDiffs = new ArrayList<>();

	for (Table mappedEntity : mappedEntities.tables()) {

		// Only diff tables that exist on both sides; tables missing on either side are
		// handled as whole-table additions/deletions elsewhere.
		if (!includeTable.test(mappedEntity)) {
			continue;
		}

		// TODO: How to handle changed columns (type?)
		Table existingTable = existingIndex.get(getKey(mappedEntity));
		TableDiff tableDiff = new TableDiff(mappedEntity);

		// Index both column sets by name using the supplied comparator; key columns are
		// merged into the same index so they participate in the diff as well.
		Map<String, Column> mappedColumns = createMapping(mappedEntity.columns(), Column::name, nameComparator);
		mappedEntity.keyColumns().forEach(it -> mappedColumns.put(it.name(), it));

		Map<String, Column> existingColumns = createMapping(existingTable.columns(), Column::name, nameComparator);
		existingTable.keyColumns().forEach(it -> existingColumns.put(it.name(), it));

		// Identify deleted columns: present in the database but no longer mapped.
		Map<String, Column> toDelete = new TreeMap<>(nameComparator);
		toDelete.putAll(existingColumns);
		mappedColumns.keySet().forEach(toDelete::remove);

		tableDiff.columnsToDrop().addAll(toDelete.values());

		// Identify added columns: mapped but not yet present in the database.
		Map<String, Column> addedColumns = new TreeMap<>(nameComparator);
		addedColumns.putAll(mappedColumns);
		existingColumns.keySet().forEach(addedColumns::remove);

		// Add columns in order. This order can interleave with existing columns.
		for (Column column : mappedEntity.keyColumns()) {
			if (addedColumns.containsKey(column.name())) {
				tableDiff.columnsToAdd().add(column);
			}
		}

		for (Column column : mappedEntity.columns()) {
			if (addedColumns.containsKey(column.name())) {
				tableDiff.columnsToAdd().add(column);
			}
		}

		tableDiffs.add(tableDiff);
	}

	return tableDiffs;
}
/**
 * Index {@code items} by a derived string key, using {@code nameComparator} for key ordering and lookup.
 */
private static <T> SortedMap<String, T> createMapping(List<T> items, Function<T, String> keyFunction,
		Comparator<String> nameComparator) {

	SortedMap<String, T> index = new TreeMap<>(nameComparator);

	for (T item : items) {
		index.put(keyFunction.apply(item), item);
	}

	return index;
}
/**
 * Build the schema-qualified identity key for a table. A {@literal null} schema
 * intentionally renders as {@code "null.<name>"} on both sides of the diff.
 */
private static String getKey(Table table) {
	return "%s.%s".formatted(table.schema(), table.name());
}
/**
 * Adapt a predicate over schema-qualified table keys into a predicate over {@link Table} instances.
 */
private static Predicate<Table> withTableKey(Predicate<String> predicate) {
	return table -> predicate.test(getKey(table));
}
}

101
spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMapping.java

@ -0,0 +1,101 @@ @@ -0,0 +1,101 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jdbc.core.mapping.schema;
import org.springframework.data.relational.core.mapping.RelationalPersistentProperty;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
/**
* Strategy interface for mapping a {@link RelationalPersistentProperty} to a Database type.
*
* @author Kurt Niemi
* @author Mark Paluch
* @since 3.2
*/
@FunctionalInterface
public interface SqlTypeMapping {

	/**
	 * Determines a column type for a persistent property.
	 *
	 * @param property the property for which the type should be determined.
	 * @return the SQL type to use, such as {@code VARCHAR} or {@code NUMERIC}. Can be {@literal null} if the strategy
	 *         cannot provide a column type.
	 */
	@Nullable
	String getColumnType(RelationalPersistentProperty property);

	/**
	 * Returns the required column type for a persistent property or throws {@link IllegalArgumentException} if the type
	 * cannot be determined.
	 *
	 * @param property the property for which the type should be determined.
	 * @return the SQL type to use, such as {@code VARCHAR} or {@code NUMERIC}. Never {@literal null}.
	 * @throws IllegalArgumentException if the column type cannot be determined.
	 */
	default String getRequiredColumnType(RelationalPersistentProperty property) {

		String columnType = getColumnType(property);

		if (ObjectUtils.isEmpty(columnType)) {
			throw new IllegalArgumentException(String.format("Cannot determine required column type for %s", property));
		}

		return columnType;
	}

	/**
	 * Determine whether a column is nullable.
	 *
	 * @param property the property for which nullability should be determined.
	 * @return whether the property is nullable.
	 */
	default boolean isNullable(RelationalPersistentProperty property) {
		return !property.getActualType().isPrimitive();
	}

	/**
	 * Returns a composed {@link SqlTypeMapping} that represents a fallback of this type mapping and another. When
	 * evaluating the composed predicate, if this mapping does not contain a column mapping (i.e.
	 * {@link #getColumnType(RelationalPersistentProperty)} returns {@literal null}), then the {@code other} mapping is
	 * evaluated.
	 * <p>
	 * Any exceptions thrown during evaluation of either type mapping are relayed to the caller; if evaluation of this
	 * type mapping throws an exception, the {@code other} predicate will not be evaluated.
	 *
	 * @param other a type mapping that will be used as fallback, must not be {@literal null}.
	 * @return a composed type mapping
	 */
	default SqlTypeMapping and(SqlTypeMapping other) {

		Assert.notNull(other, "Other SqlTypeMapping must not be null");

		return property -> {

			String columnType = getColumnType(property);

			if (ObjectUtils.isEmpty(columnType)) {
				return other.getColumnType(property);
			}

			return columnType;
		};
	}
}

65
spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Table.java

@ -0,0 +1,65 @@ @@ -0,0 +1,65 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jdbc.core.mapping.schema;
import java.util.ArrayList;
import java.util.List;
import org.springframework.lang.Nullable;
import org.springframework.util.ObjectUtils;
/**
* Models a table for SQL schema generation.
*
* @author Kurt Niemi
* @since 3.2
*/
record Table(@Nullable String schema, String name, List<Column> keyColumns, List<Column> columns) {

	public Table(String name) {
		this(null, name);
	}

	public Table(@Nullable String schema, String name) {
		this(schema, name, new ArrayList<>(), new ArrayList<>());
	}

	/**
	 * Identity is defined by {@code schema} and {@code name} only; column lists do not
	 * participate so that tables can be matched across model and database snapshots.
	 */
	@Override
	public boolean equals(Object other) {

		if (other == this) {
			return true;
		}

		// Records are final, so an instanceof check is equivalent to comparing getClass().
		if (!(other instanceof Table that)) {
			return false;
		}

		return ObjectUtils.nullSafeEquals(this.schema, that.schema) && ObjectUtils.nullSafeEquals(this.name, that.name);
	}

	@Override
	public int hashCode() {
		// Consistent with equals: derived from schema and name only.
		return 17 + ObjectUtils.nullSafeHashCode(this.schema) + ObjectUtils.nullSafeHashCode(this.name);
	}
}

21
spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/DatabaseTypeMapping.java → spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/TableDiff.java

@ -13,17 +13,22 @@ @@ -13,17 +13,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.relational.core.mapping.schemasqlgeneration;
package org.springframework.data.jdbc.core.mapping.schema;
import java.util.ArrayList;
import java.util.List;
/**
* Interface for mapping a Java type to a Database type.
*
* To customize the mapping an instance of a class implementing {@link DatabaseTypeMapping} interface
* can be set on the {@link SchemaModel} class.
* Used to keep track of columns that should be added or deleted, when performing a difference between a source and
* target {@link Tables}.
*
* @author Kurt Niemi
* @since 3.2
*/
public interface DatabaseTypeMapping {
public String databaseTypeFromClass(Class<?> type);
}
// Holds the column-level differences for a single table: columns that must be added to
// and dropped from the database to match the mapped entity model.
record TableDiff(Table table, List<Column> columnsToAdd, List<Column> columnsToDrop) {

	// Convenience constructor starting with empty, mutable lists that are populated during diffing.
	public TableDiff(Table table) {
		this(table, new ArrayList<>(), new ArrayList<>());
	}
}

79
spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Tables.java

@ -0,0 +1,79 @@ @@ -0,0 +1,79 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jdbc.core.mapping.schema;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.springframework.data.annotation.Id;
import org.springframework.data.relational.core.mapping.RelationalMappingContext;
import org.springframework.data.relational.core.mapping.RelationalPersistentEntity;
import org.springframework.data.relational.core.mapping.RelationalPersistentProperty;
import org.springframework.lang.Nullable;
/**
* Model class that contains Table/Column information that can be used to generate SQL for Schema generation.
*
* @author Kurt Niemi
* @since 3.2
*/
record Tables(List<Table> tables) {

	public static Tables from(RelationalMappingContext context) {
		return from(context.getPersistentEntities().stream(), new DefaultSqlTypeMapping(), null);
	}

	// TODO: Add support (i.e. create tickets) to support mapped collections, entities, embedded properties, and aggregate
	// references.
	public static Tables from(Stream<? extends RelationalPersistentEntity<?>> persistentEntities,
			SqlTypeMapping sqlTypeMapping, @Nullable String defaultSchema) {

		List<Table> tables = persistentEntities
				.filter(it -> it.isAnnotationPresent(org.springframework.data.relational.core.mapping.Table.class)) //
				.map(entity -> {

					Table table = new Table(defaultSchema, entity.getTableName().getReference());

					Set<RelationalPersistentProperty> identifierColumns = new LinkedHashSet<>();
					entity.getPersistentProperties(Id.class).forEach(identifierColumns::add);

					for (RelationalPersistentProperty property : entity) {

						// Skip non-embedded entity references; only scalar/embedded properties become columns.
						if (property.isEntity() && !property.isEmbedded()) {
							continue;
						}

						// Resolve the column type once; getRequiredColumnType(…) fails fast for unmappable
						// types. Previously the validated value was discarded and getColumnType(…) was
						// invoked a second time for the Column.
						String columnType = sqlTypeMapping.getRequiredColumnType(property);
						Column column = new Column(property.getColumnName().getReference(), columnType,
								sqlTypeMapping.isNullable(property), identifierColumns.contains(property));
						table.columns().add(column);
					}

					return table;
				}).collect(Collectors.toList());

		return new Tables(tables);
	}

	public static Tables empty() {
		return new Tables(Collections.emptyList());
	}
}

7
spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/package-info.java

@ -0,0 +1,7 @@ @@ -0,0 +1,7 @@
/**
* Schema creation and schema update integration with Liquibase.
*/
@NonNullApi
package org.springframework.data.jdbc.core.mapping.schema;
import org.springframework.lang.NonNullApi;

249
spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterIntegrationTests.java

@ -0,0 +1,249 @@ @@ -0,0 +1,249 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jdbc.core.mapping.schema;
import static org.assertj.core.api.Assertions.*;
import liquibase.change.AddColumnConfig;
import liquibase.change.ColumnConfig;
import liquibase.change.core.AddColumnChange;
import liquibase.change.core.DropColumnChange;
import liquibase.change.core.DropTableChange;
import liquibase.changelog.ChangeSet;
import liquibase.changelog.DatabaseChangeLog;
import liquibase.database.core.H2Database;
import liquibase.database.jvm.JdbcConnection;
import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Set;
import org.assertj.core.api.ThrowingConsumer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.springframework.core.io.ClassRelativeResourceLoader;
import org.springframework.core.io.FileSystemResource;
import org.springframework.data.annotation.Id;
import org.springframework.data.jdbc.core.mapping.schema.LiquibaseChangeSetWriter.ChangeSetMetadata;
import org.springframework.data.relational.core.mapping.RelationalMappingContext;
import org.springframework.data.relational.core.mapping.Table;
import org.springframework.data.util.Predicates;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
/**
* Integration tests for {@link LiquibaseChangeSetWriter}.
*
* @author Mark Paluch
*/
class LiquibaseChangeSetWriterIntegrationTests {

	// Dropping unused tables requires opting in via the drop-table filter.
	@Test // GH-1430
	void shouldRemoveUnusedTable() {

		withEmbeddedDatabase("unused-table.sql", c -> {

			H2Database h2Database = new H2Database();
			h2Database.setConnection(new JdbcConnection(c));

			LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(new RelationalMappingContext());
			// Opt into dropping every table that is not backed by a mapped entity.
			writer.setDropTableFilter(Predicates.isTrue());

			ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog());

			assertThat(changeSet.getChanges()).hasSize(1);
			assertThat(changeSet.getChanges().get(0)).isInstanceOf(DropTableChange.class);

			DropTableChange drop = (DropTableChange) changeSet.getChanges().get(0);
			assertThat(drop.getTableName()).isEqualToIgnoringCase("DELETE_ME");
		});
	}

	// Without a drop-table filter the writer must not emit any destructive change.
	@Test // GH-1430
	void shouldNotDropTablesByDefault() {

		withEmbeddedDatabase("unused-table.sql", c -> {

			H2Database h2Database = new H2Database();
			h2Database.setConnection(new JdbcConnection(c));

			LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(new RelationalMappingContext());

			ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog());

			assertThat(changeSet.getChanges()).isEmpty();
		});
	}

	// Person maps id/first_name (already present) plus last_name, which must be added.
	@Test // GH-1430
	void shouldAddColumnToTable() {

		withEmbeddedDatabase("person-with-id-and-name.sql", c -> {

			H2Database h2Database = new H2Database();
			h2Database.setConnection(new JdbcConnection(c));

			LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(Person.class));

			ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog());

			assertThat(changeSet.getChanges()).hasSize(1);
			assertThat(changeSet.getChanges().get(0)).isInstanceOf(AddColumnChange.class);

			AddColumnChange addColumns = (AddColumnChange) changeSet.getChanges().get(0);
			assertThat(addColumns.getTableName()).isEqualToIgnoringCase("PERSON");
			assertThat(addColumns.getColumns()).hasSize(1);

			AddColumnConfig addColumn = addColumns.getColumns().get(0);
			assertThat(addColumn.getName()).isEqualTo("last_name");
			assertThat(addColumn.getType()).isEqualTo("VARCHAR(255 BYTE)");
		});
	}

	// DifferentPerson maps to the same "person" table with different columns;
	// with drop-column filtering enabled, the obsolete columns must be dropped.
	@Test // GH-1430
	void shouldRemoveColumnFromTable() {

		withEmbeddedDatabase("person-with-id-and-name.sql", c -> {

			H2Database h2Database = new H2Database();
			h2Database.setConnection(new JdbcConnection(c));

			LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(DifferentPerson.class));
			// Opt into dropping all unmapped columns.
			writer.setDropColumnFilter((s, s2) -> true);

			ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog());

			assertThat(changeSet.getChanges()).hasSize(2);
			assertThat(changeSet.getChanges().get(0)).isInstanceOf(AddColumnChange.class);

			AddColumnChange addColumns = (AddColumnChange) changeSet.getChanges().get(0);
			assertThat(addColumns.getTableName()).isEqualToIgnoringCase("PERSON");
			assertThat(addColumns.getColumns()).hasSize(2);
			assertThat(addColumns.getColumns()).extracting(AddColumnConfig::getName).containsExactly("my_id", "hello");

			DropColumnChange dropColumns = (DropColumnChange) changeSet.getChanges().get(1);
			assertThat(dropColumns.getTableName()).isEqualToIgnoringCase("PERSON");
			assertThat(dropColumns.getColumns()).hasSize(2);
			assertThat(dropColumns.getColumns()).extracting(ColumnConfig::getName).map(String::toUpperCase).contains("ID",
					"FIRST_NAME");
		});
	}

	// Without a drop-column filter only additive changes may be emitted.
	@Test // GH-1430
	void doesNotRemoveColumnsByDefault() {

		withEmbeddedDatabase("person-with-id-and-name.sql", c -> {

			H2Database h2Database = new H2Database();
			h2Database.setConnection(new JdbcConnection(c));

			LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(DifferentPerson.class));

			ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog());

			assertThat(changeSet.getChanges()).hasSize(1);
			assertThat(changeSet.getChanges().get(0)).isInstanceOf(AddColumnChange.class);
		});
	}

	// Writing a change set to a resource that does not yet exist creates a fresh changelog file.
	@Test // GH-1430
	void shouldCreateNewChangeLog(@TempDir File tempDir) {

		withEmbeddedDatabase("person-with-id-and-name.sql", c -> {

			File changelogYml = new File(tempDir, "changelog.yml");
			// NOTE(review): h2Database appears unused here — writeChangeSet(Resource) does not
			// consult the database; candidate for removal. TODO confirm.
			H2Database h2Database = new H2Database();
			h2Database.setConnection(new JdbcConnection(c));

			LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(DifferentPerson.class));
			writer.writeChangeSet(new FileSystemResource(changelogYml));

			assertThat(tempDir).isDirectoryContaining(it -> it.getName().equalsIgnoreCase("changelog.yml"));
			assertThat(changelogYml).content().contains("author: Spring Data Relational").contains("name: hello");
		});
	}

	// Writing to an existing changelog must append the new change set, preserving existing entries.
	@Test // GH-1430
	void shouldAppendToChangeLog(@TempDir File tempDir) {

		withEmbeddedDatabase("person-with-id-and-name.sql", c -> {

			// NOTE(review): h2Database appears unused here as well — TODO confirm.
			H2Database h2Database = new H2Database();
			h2Database.setConnection(new JdbcConnection(c));

			File changelogYml = new File(tempDir, "changelog.yml");
			// Seed the temp dir with the pre-existing changelog fixture (authored by "Someone").
			try (InputStream is = getClass().getResourceAsStream("changelog.yml")) {
				Files.copy(is, changelogYml.toPath());
			}

			LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(DifferentPerson.class));
			writer.writeChangeSet(new FileSystemResource(new File(tempDir, "changelog.yml")));

			assertThat(changelogYml).content().contains("author: Someone").contains("author: Spring Data Relational")
					.contains("name: hello");
		});
	}

	// Build a mapping context pre-populated with the given entity classes.
	RelationalMappingContext contextOf(Class<?>... classes) {

		RelationalMappingContext context = new RelationalMappingContext();
		context.setInitialEntitySet(Set.of(classes));
		context.afterPropertiesSet();
		return context;
	}

	// Run the given callback against a fresh embedded H2 database initialized with the given script;
	// the database is always shut down afterwards.
	void withEmbeddedDatabase(String script, ThrowingConsumer<Connection> c) {

		EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder(new ClassRelativeResourceLoader(getClass())) //
				.generateUniqueName(true) //
				.setType(EmbeddedDatabaseType.H2) //
				.setScriptEncoding("UTF-8") //
				.ignoreFailedDrops(true) //
				.addScript(script) //
				.build();

		try {

			try (Connection connection = embeddedDatabase.getConnection()) {
				c.accept(connection);
			}
		} catch (SQLException e) {
			throw new RuntimeException(e);
		} finally {
			embeddedDatabase.shutdown();
		}
	}

	@Table
	static class Person {
		@Id int id;
		String firstName;
		String lastName;
	}

	// Maps to the same "person" table as the fixture but with a different column set.
	@Table("person")
	static class DifferentPerson {
		@Id int my_id;
		String hello;
	}
}

91
spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterUnitTests.java

@ -0,0 +1,91 @@ @@ -0,0 +1,91 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jdbc.core.mapping.schema;
import static org.assertj.core.api.Assertions.*;
import liquibase.change.ColumnConfig;
import liquibase.change.core.CreateTableChange;
import liquibase.changelog.ChangeSet;
import liquibase.changelog.DatabaseChangeLog;
import org.junit.jupiter.api.Test;
import org.springframework.data.annotation.Id;
import org.springframework.data.jdbc.core.mapping.schema.LiquibaseChangeSetWriter.ChangeSetMetadata;
import org.springframework.data.relational.core.mapping.RelationalMappingContext;
/**
* Unit tests for {@link LiquibaseChangeSetWriter}.
*
* @author Mark Paluch
*/
class LiquibaseChangeSetWriterUnitTests {

	// Without a database snapshot, every mapped entity results in a CreateTableChange.
	@Test // GH-1480
	void newTableShouldCreateChangeSet() {

		RelationalMappingContext context = new RelationalMappingContext();
		context.getRequiredPersistentEntity(VariousTypes.class);

		LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(context);

		ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), new DatabaseChangeLog());

		CreateTableChange createTable = (CreateTableChange) changeSet.getChanges().get(0);

		assertThat(createTable.getColumns()).extracting(ColumnConfig::getName).containsSequence("id",
				"luke_i_am_your_father", "dark_side", "floater");
		assertThat(createTable.getColumns()).extracting(ColumnConfig::getType).containsSequence("BIGINT",
				"VARCHAR(255 BYTE)", "TINYINT", "FLOAT");

		// The @Id column must carry a NOT NULL constraint.
		ColumnConfig id = createTable.getColumns().get(0);
		assertThat(id.getConstraints().isNullable()).isFalse();
	}

	// The schema filter restricts which mapped entities contribute to the change set.
	@Test // GH-1480
	void shouldApplySchemaFilter() {

		RelationalMappingContext context = new RelationalMappingContext();
		context.getRequiredPersistentEntity(VariousTypes.class);
		context.getRequiredPersistentEntity(OtherTable.class);

		LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(context);
		// Only entities whose simple name contains "OtherTable" are considered.
		writer.setSchemaFilter(it -> it.getName().contains("OtherTable"));

		ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), new DatabaseChangeLog());

		assertThat(changeSet.getChanges()).hasSize(1);
		CreateTableChange createTable = (CreateTableChange) changeSet.getChanges().get(0);

		assertThat(createTable.getTableName()).isEqualTo("other_table");
	}

	@org.springframework.data.relational.core.mapping.Table
	static class VariousTypes {
		@Id long id;
		String lukeIAmYourFather;
		Boolean darkSide;
		Float floater;
		Double doubleClass;
		Integer integerClass;
	}

	@org.springframework.data.relational.core.mapping.Table
	static class OtherTable {
		@Id long id;
	}
}

90
spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiffUnitTests.java

@ -0,0 +1,90 @@ @@ -0,0 +1,90 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jdbc.core.mapping.schema;
import static org.assertj.core.api.Assertions.*;
import java.text.Collator;
import java.util.Locale;
import org.junit.jupiter.api.Test;
import org.springframework.data.relational.core.mapping.RelationalMappingContext;
/**
* Unit tests for {@link SchemaDiff}.
*
* @author Kurt Niemi
* @author Mark Paluch
*/
class SchemaDiffUnitTests {

	// End-to-end diff: one added table, one deleted table, one table with an added column.
	@Test
	void testDiffSchema() {

		RelationalMappingContext context = new RelationalMappingContext();
		context.getRequiredPersistentEntity(SchemaDiffUnitTests.Table1.class);
		context.getRequiredPersistentEntity(SchemaDiffUnitTests.Table2.class);

		// Start with identical mapped and existing snapshots, then perturb the existing side.
		Tables mappedEntities = Tables.from(context);
		Tables existingTables = Tables.from(context);

		// Table table1 does not exist on the database yet.
		existingTables.tables().remove(new Table("table1"));

		// Add column to table2
		Column newColumn = new Column("newcol", "VARCHAR(255)");
		Table table2 = mappedEntities.tables().get(mappedEntities.tables().indexOf(new Table("table2")));
		table2.columns().add(newColumn);

		// This table exists only in the database and should be reported as a deletion.
		Table delete_me = new Table(null, "delete_me");
		delete_me.columns().add(newColumn);
		existingTables.tables().add(delete_me);

		SchemaDiff diff = SchemaDiff.diff(mappedEntities, existingTables, Collator.getInstance(Locale.ROOT)::compare);

		// Verify that table1 is reported as an added table in the diff.
		assertThat(diff.tableAdditions()).isNotEmpty();
		assertThat(diff.tableAdditions()).extracting(Table::name).containsOnly("table1");

		assertThat(diff.tableDeletions()).isNotEmpty();
		assertThat(diff.tableDeletions()).extracting(Table::name).containsOnly("delete_me");

		// table2 exists on both sides and differs only by the added column.
		assertThat(diff.tableDiffs()).hasSize(1);
		assertThat(diff.tableDiffs()).extracting(it -> it.table().name()).containsOnly("table2");
		assertThat(diff.tableDiffs().get(0).columnsToAdd()).contains(newColumn);
		assertThat(diff.tableDiffs().get(0).columnsToDrop()).isEmpty();
	}

	// Test table classes for performing schema diff

	@org.springframework.data.relational.core.mapping.Table
	static class Table1 {
		String force;
		String be;
		String with;
		String you;
	}

	@org.springframework.data.relational.core.mapping.Table
	static class Table2 {
		String lukeIAmYourFather;
		Boolean darkSide;
		Float floater;
		Double doubleClass;
		Integer integerClass;
	}
}

68
spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMappingUnitTests.java

@ -0,0 +1,68 @@ @@ -0,0 +1,68 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jdbc.core.mapping.schema;
import static org.assertj.core.api.Assertions.*;
import static org.mockito.Mockito.*;
import java.nio.charset.Charset;
import java.time.Duration;
import java.time.ZoneId;
import org.junit.jupiter.api.Test;
import org.springframework.data.relational.core.mapping.RelationalPersistentProperty;
/**
* Unit tests for {@link SqlTypeMapping}.
*
* @author Mark Paluch
*/
class SqlTypeMappingUnitTests {

	// Composition under test: default mapping with two chained fallbacks for ZoneId and Duration.
	SqlTypeMapping typeMapping = new DefaultSqlTypeMapping() //
			.and(property -> property.getActualType().equals(ZoneId.class) ? "ZONEID" : null)
			.and(property -> property.getActualType().equals(Duration.class) ? "INTERVAL" : null);

	// Types handled by the first (default) mapping short-circuit the fallbacks.
	@Test // GH-1480
	void shouldComposeTypeMapping() {

		RelationalPersistentProperty p = mock(RelationalPersistentProperty.class);
		doReturn(String.class).when(p).getActualType();

		assertThat(typeMapping.getColumnType(p)).isEqualTo("VARCHAR(255 BYTE)");
		assertThat(typeMapping.getRequiredColumnType(p)).isEqualTo("VARCHAR(255 BYTE)");
	}

	// Types unknown to the default mapping are resolved by the chained fallbacks.
	@Test // GH-1480
	void shouldDelegateToCompositeTypeMapping() {

		RelationalPersistentProperty p = mock(RelationalPersistentProperty.class);
		doReturn(Duration.class).when(p).getActualType();

		assertThat(typeMapping.getColumnType(p)).isEqualTo("INTERVAL");
		assertThat(typeMapping.getRequiredColumnType(p)).isEqualTo("INTERVAL");
	}

	// Types unknown to the entire chain yield null; the "required" variant must then throw.
	@Test // GH-1480
	void shouldPassThruNullValues() {

		RelationalPersistentProperty p = mock(RelationalPersistentProperty.class);
		doReturn(Charset.class).when(p).getActualType();

		assertThat(typeMapping.getColumnType(p)).isNull();
		assertThatIllegalArgumentException().isThrownBy(() -> typeMapping.getRequiredColumnType(p));
	}
}

16
spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/changelog.yml

@ -0,0 +1,16 @@ @@ -0,0 +1,16 @@
databaseChangeLog:
- changeSet:
id: '123'
author: Someone
objectQuotingStrategy: LEGACY
changes:
- createTable:
columns:
- column:
autoIncrement: true
constraints:
nullable: false
primaryKey: true
name: id
type: INT
tableName: foo

5
spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/person-with-id-and-name.sql

@ -0,0 +1,5 @@ @@ -0,0 +1,5 @@
CREATE TABLE person
(
id int,
first_name varchar(255)
);

4
spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/unused-table.sql

@ -0,0 +1,4 @@ @@ -0,0 +1,4 @@
CREATE TABLE DELETE_ME
(
id int
);

3
spring-data-relational/pom.xml

@ -54,7 +54,6 @@ @@ -54,7 +54,6 @@
<groupId>org.liquibase</groupId>
<artifactId>liquibase-core</artifactId>
<version>${liquibase.version}</version>
<scope>compile</scope>
<optional>true</optional>
</dependency>
@ -105,6 +104,6 @@ @@ -105,6 +104,6 @@
<scope>test</scope>
</dependency>
</dependencies>
</dependencies>
</project>

2
spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/RelationalMappingContext.java

@ -128,5 +128,5 @@ public class RelationalMappingContext @@ -128,5 +128,5 @@ public class RelationalMappingContext
persistentProperty.setForceQuote(isForceQuote());
persistentProperty.setExpressionEvaluator(this.expressionEvaluator);
}
}

47
spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/DefaultDatabaseTypeMapping.java

@ -1,47 +0,0 @@ @@ -1,47 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.relational.core.mapping.schemasqlgeneration;
import java.util.HashMap;
/**
* Class that provides a default implementation of mapping Java type to a Database type.
*
* To customize the mapping an instance of a class implementing {@link DatabaseTypeMapping} interface
* can be set on the {@link SchemaModel} class
*
* @author Kurt Niemi
* @since 3.2
*/
/**
 * Default mapping from Java types to database column types, covering common JDK value types.
 */
public class DefaultDatabaseTypeMapping implements DatabaseTypeMapping {

	final HashMap<Class<?>, String> mapClassToDatabaseType = new HashMap<>();

	public DefaultDatabaseTypeMapping() {

		register(String.class, "VARCHAR(255 BYTE)");
		register(Boolean.class, "TINYINT");
		register(Double.class, "DOUBLE");
		register(Float.class, "FLOAT");
		register(Integer.class, "INT");
		register(Long.class, "BIGINT");
	}

	// Record a single Java-type-to-database-type association.
	private void register(Class<?> javaType, String databaseType) {
		mapClassToDatabaseType.put(javaType, databaseType);
	}

	public String databaseTypeFromClass(Class<?> type) {
		return mapClassToDatabaseType.get(type);
	}
}

351
spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/LiquibaseChangeSetGenerator.java

@ -1,351 +0,0 @@ @@ -1,351 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.relational.core.mapping.schemasqlgeneration;
import liquibase.CatalogAndSchema;
import liquibase.change.AddColumnConfig;
import liquibase.change.ColumnConfig;
import liquibase.change.ConstraintsConfig;
import liquibase.change.core.AddColumnChange;
import liquibase.change.core.CreateTableChange;
import liquibase.change.core.DropColumnChange;
import liquibase.change.core.DropTableChange;
import liquibase.changelog.ChangeLogChild;
import liquibase.changelog.ChangeLogParameters;
import liquibase.changelog.ChangeSet;
import liquibase.changelog.DatabaseChangeLog;
import liquibase.database.Database;
import liquibase.exception.ChangeLogParseException;
import liquibase.exception.DatabaseException;
import liquibase.parser.core.yaml.YamlChangeLogParser;
import liquibase.resource.DirectoryResourceAccessor;
import liquibase.serializer.ChangeLogSerializer;
import liquibase.serializer.core.yaml.YamlChangeLogSerializer;
import liquibase.snapshot.DatabaseSnapshot;
import liquibase.snapshot.InvalidExampleException;
import liquibase.snapshot.SnapshotControl;
import liquibase.snapshot.SnapshotGeneratorFactory;
import liquibase.structure.core.Column;
import liquibase.structure.core.Table;
import org.springframework.core.io.Resource;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.function.Predicate;
/**
* Use this class to generate Liquibase change sets.
*
* First create a {@link SchemaModel} instance passing in a RelationalContext to have
* a model that represents the Table(s)/Column(s) that the code expects to exist.
*
* And then optionally create a Liquibase database object that points to an existing database
* if one desires to create a changeset that could be applied to that database.
*
* If a Liquibase database object is not used, then the change set created would be
* something that could be applied to an empty database to make it match the state of the code.
*
* Prior to applying the changeset one should review and make adjustments appropriately.
*
* @author Kurt Niemi
* @since 3.2
*/
public class LiquibaseChangeSetGenerator {

	/** Model representing the tables/columns as defined in code. */
	private final SchemaModel sourceModel;

	/** Existing Liquibase database to diff against; {@code null} when targeting an empty database. */
	private final Database targetDatabase;

	/**
	 * Predicate identifying Liquibase's own bookkeeping tables; these are always excluded from the comparison. Extend
	 * this predicate should future Liquibase versions introduce additional internal tables.
	 */
	private final Predicate<String> liquibaseTables = table -> table.startsWith("DATABASECHANGELOG");

	/**
	 * Tables for which this predicate returns {@code true} are treated as user application tables and are never
	 * dropped. By default no existing table in the target database is ever dropped.
	 */
	public Predicate<String> userApplicationTables = table -> true;

	/**
	 * Columns (passed in the format {@code TableName.ColumnName}) for which this predicate returns {@code true} are
	 * treated as user application columns and are never dropped. By default no existing column is ever dropped.
	 */
	public Predicate<String> userApplicationTableColumns = column -> true;

	/**
	 * Create a generator producing a change set applicable to an empty database.
	 *
	 * @param sourceModel model representing the tables/columns as defined in code.
	 */
	public LiquibaseChangeSetGenerator(SchemaModel sourceModel) {

		this.sourceModel = sourceModel;
		this.targetDatabase = null;
	}

	/**
	 * Create a generator producing a change set against an existing database.
	 *
	 * @param sourceModel model representing the tables/columns as defined in code.
	 * @param targetDatabase existing Liquibase database to diff against.
	 */
	public LiquibaseChangeSetGenerator(SchemaModel sourceModel, Database targetDatabase) {

		this.sourceModel = sourceModel;
		this.targetDatabase = targetDatabase;
	}

	/**
	 * Generate a Liquibase change set using the current timestamp as change set id and {@code Spring Data JDBC} as
	 * author.
	 *
	 * @param changeLogResource resource the change set is written to (appended if the file already holds a change log).
	 * @throws InvalidExampleException if the database snapshot cannot be created.
	 * @throws DatabaseException on database access failures.
	 * @throws IOException if the change log file cannot be written.
	 * @throws ChangeLogParseException if an existing change log cannot be parsed.
	 */
	public void generateLiquibaseChangeset(Resource changeLogResource)
			throws InvalidExampleException, DatabaseException, IOException, ChangeLogParseException {
		generateLiquibaseChangeset(changeLogResource, Long.toString(System.currentTimeMillis()), "Spring Data JDBC");
	}

	/**
	 * Generate a Liquibase change set.
	 *
	 * @param changeLogResource resource the change set is written to (appended if the file already holds a change log).
	 * @param changeSetId unique value identifying the change set.
	 * @param changeSetAuthor author information written to the change set file.
	 * @throws InvalidExampleException if the database snapshot cannot be created.
	 * @throws DatabaseException on database access failures.
	 * @throws IOException if the change log file cannot be written.
	 * @throws ChangeLogParseException if an existing change log cannot be parsed.
	 */
	public void generateLiquibaseChangeset(Resource changeLogResource, String changeSetId, String changeSetAuthor)
			throws InvalidExampleException, DatabaseException, IOException, ChangeLogParseException {

		// Without a target database the diff is taken against an empty model, i.e. the change set
		// recreates the full schema.
		SchemaDiff difference = targetDatabase != null //
				? new SchemaDiff(sourceModel, getLiquibaseModel())
				: new SchemaDiff(sourceModel, new SchemaModel());

		DatabaseChangeLog databaseChangeLog = getDatabaseChangeLog(changeLogResource.getFile());
		ChangeSet changeSet = new ChangeSet(changeSetId, changeSetAuthor, false, false, "", "", "", databaseChangeLog);

		generateTableAdditionsDeletions(changeSet, difference);
		generateTableModifications(changeSet, difference);

		writeChangeSet(databaseChangeLog, changeSet, changeLogResource.getFile());
	}

	/** Append create-table changes for added tables and drop-table changes for removed, non-user tables. */
	private void generateTableAdditionsDeletions(ChangeSet changeSet, SchemaDiff difference) {

		for (TableModel table : difference.getTableAdditions()) {
			changeSet.addChange(createAddTableChange(table));
		}

		for (TableModel table : difference.getTableDeletions()) {
			// Only drop a table when the predicate does NOT classify it as a user application table.
			if (!userApplicationTables.test(table.name())) {
				changeSet.addChange(createDropTableChange(table));
			}
		}
	}

	/** Append add-column and drop-column changes for tables present in both models. */
	private void generateTableModifications(ChangeSet changeSet, SchemaDiff difference) {

		for (TableDiff table : difference.getTableDiff()) {

			if (!table.addedColumns().isEmpty()) {

				AddColumnChange addColumnChange = new AddColumnChange();
				addColumnChange.setSchemaName(table.tableModel().schema());
				addColumnChange.setTableName(table.tableModel().name());

				for (ColumnModel column : table.addedColumns()) {
					addColumnChange.addColumn(createAddColumnChange(column));
				}
				changeSet.addChange(addColumnChange);
			}

			// Retain columns the predicate classifies as user application columns.
			List<ColumnModel> deletedColumns = new ArrayList<>();
			for (ColumnModel columnModel : table.deletedColumns()) {

				String fullName = table.tableModel().name() + "." + columnModel.name();
				if (!userApplicationTableColumns.test(fullName)) {
					deletedColumns.add(columnModel);
				}
			}

			if (!deletedColumns.isEmpty()) {

				DropColumnChange dropColumnChange = new DropColumnChange();
				dropColumnChange.setSchemaName(table.tableModel().schema());
				dropColumnChange.setTableName(table.tableModel().name());

				// Fix: iterate the FILTERED list. The original iterated table.deletedColumns() here and
				// therefore dropped columns that userApplicationTableColumns asked to retain.
				List<ColumnConfig> dropColumns = new ArrayList<>();
				for (ColumnModel column : deletedColumns) {
					ColumnConfig config = new ColumnConfig();
					config.setName(column.name());
					dropColumns.add(config);
				}
				dropColumnChange.setColumns(dropColumns);
				changeSet.addChange(dropColumnChange);
			}
		}
	}

	/**
	 * Parse an existing change log file; fall back to a fresh, empty change log when the file does not exist yet or
	 * cannot be parsed.
	 */
	private DatabaseChangeLog getDatabaseChangeLog(File changeLogFile) {

		try {
			File parentDirectory = changeLogFile.getParentFile();
			if (parentDirectory == null) {
				parentDirectory = new File("./");
			}

			DirectoryResourceAccessor resourceAccessor = new DirectoryResourceAccessor(parentDirectory);
			return new YamlChangeLogParser().parse(changeLogFile.getName(), new ChangeLogParameters(), resourceAccessor);
		} catch (Exception ex) {
			// Best-effort: any parse/IO failure means we start a new change log at this path.
			return new DatabaseChangeLog(changeLogFile.getAbsolutePath());
		}
	}

	/** Serialize all existing change sets plus the new one back to the change log file as YAML. */
	private void writeChangeSet(DatabaseChangeLog databaseChangeLog, ChangeSet changeSet, File changeLogFile)
			throws FileNotFoundException, IOException {

		List<ChangeLogChild> changes = new ArrayList<>(databaseChangeLog.getChangeSets());
		changes.add(changeSet);

		ChangeLogSerializer serializer = new YamlChangeLogSerializer();
		// try-with-resources: the original leaked the FileOutputStream.
		try (FileOutputStream fos = new FileOutputStream(changeLogFile)) {
			serializer.write(changes, fos);
		}
	}

	/** Snapshot the target database into a {@link SchemaModel} for comparison. */
	private SchemaModel getLiquibaseModel() throws DatabaseException, InvalidExampleException {

		SchemaModel liquibaseModel = new SchemaModel();

		CatalogAndSchema[] schemas = new CatalogAndSchema[] { targetDatabase.getDefaultSchema() };
		SnapshotControl snapshotControl = new SnapshotControl(targetDatabase);
		DatabaseSnapshot snapshot = SnapshotGeneratorFactory.getInstance().createSnapshot(schemas, targetDatabase,
				snapshotControl);

		// NOTE(review): this intentionally mutates sourceModel, back-filling missing schema names with the
		// target's default catalog so both models compare tables in the same namespace.
		List<TableModel> tableData = sourceModel.getTableData();
		for (int i = 0; i < tableData.size(); i++) {

			TableModel currentModel = tableData.get(i);
			if (currentModel.schema() == null || currentModel.schema().isEmpty()) {
				tableData.set(i, new TableModel(targetDatabase.getDefaultSchema().getCatalogName(), currentModel.name(),
						currentModel.columns(), currentModel.keyColumns()));
			}
		}

		for (Table table : snapshot.get(liquibase.structure.core.Table.class)) {

			// Exclude internal Liquibase tables from the comparison.
			if (liquibaseTables.test(table.getName())) {
				continue;
			}

			TableModel tableModel = new TableModel(table.getSchema().getCatalogName(), table.getName());
			for (Column column : table.getColumns()) {
				tableModel.columns()
						.add(new ColumnModel(column.getName(), column.getType().toString(), column.isNullable(), false));
			}
			liquibaseModel.getTableData().add(tableModel);
		}

		return liquibaseModel;
	}

	/** Create the column config for a column to be added; identity columns become auto-increment. */
	private AddColumnConfig createAddColumnChange(ColumnModel column) {

		AddColumnConfig config = new AddColumnConfig();
		config.setName(column.name());
		config.setType(column.type());

		if (column.identityColumn()) {
			config.setAutoIncrement(true);
		}
		return config;
	}

	/** Create a create-table change; identity columns become auto-increment primary keys. */
	private CreateTableChange createAddTableChange(TableModel table) {

		CreateTableChange change = new CreateTableChange();
		change.setSchemaName(table.schema());
		change.setTableName(table.name());

		for (ColumnModel column : table.columns()) {

			ColumnConfig columnConfig = new ColumnConfig();
			columnConfig.setName(column.name());
			columnConfig.setType(column.type());

			if (column.identityColumn()) {
				columnConfig.setAutoIncrement(true);
				ConstraintsConfig constraints = new ConstraintsConfig();
				constraints.setPrimaryKey(true);
				columnConfig.setConstraints(constraints);
			}
			change.addColumn(columnConfig);
		}
		return change;
	}

	/** Create a drop-table change cascading to dependent constraints. */
	private DropTableChange createDropTableChange(TableModel table) {

		DropTableChange change = new DropTableChange();
		change.setSchemaName(table.schema());
		change.setTableName(table.name());
		change.setCascadeConstraints(true);
		return change;
	}
}

99
spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/SchemaDiff.java

@@ -1,99 +0,0 @@
package org.springframework.data.relational.core.mapping.schemasqlgeneration;
import java.util.*;
/**
* This class is created to return the difference between a source and target {@link SchemaModel}
*
* The difference consists of Table Additions, Deletions, and Modified Tables (i.e. table
* exists in both source and target - but has columns to add or delete)
*
* @author Kurt Niemi
* @since 3.2
*/
public class SchemaDiff {

	private final List<TableModel> tableAdditions = new ArrayList<>();
	private final List<TableModel> tableDeletions = new ArrayList<>();
	private final List<TableDiff> tableDiffs = new ArrayList<>();

	private final SchemaModel source;
	private final SchemaModel target;

	/**
	 * Compare two {@link SchemaModel}s and record the differences.
	 *
	 * @param target model reflecting the current database state.
	 * @param source model reflecting the desired database state.
	 */
	public SchemaDiff(SchemaModel target, SchemaModel source) {

		this.source = source;
		this.target = target;

		diffTableAdditionDeletion();
		diffTable();
	}

	/** @return tables present in the target model but not in the source model. */
	public List<TableModel> getTableAdditions() {
		return tableAdditions;
	}

	/** @return tables present in the source model but not in the target model. */
	public List<TableModel> getTableDeletions() {
		return tableDeletions;
	}

	/** @return per-table column differences for tables present in both models. */
	public List<TableDiff> getTableDiff() {
		return tableDiffs;
	}

	/** Partition tables into additions and deletions using {@link TableModel} equality. */
	private void diffTableAdditionDeletion() {

		Set<TableModel> sourceTables = new HashSet<>(source.getTableData());
		Set<TableModel> targetTables = new HashSet<>(target.getTableData());

		// Tables only present in the source model are considered deleted.
		Set<TableModel> deletedTables = new HashSet<>(sourceTables);
		deletedTables.removeAll(targetTables);
		tableDeletions.addAll(deletedTables);

		// Tables only present in the target model are considered added.
		Set<TableModel> addedTables = new HashSet<>(targetTables);
		addedTables.removeAll(sourceTables);
		tableAdditions.addAll(addedTables);
	}

	/** Compute added/deleted columns for every table that exists in both models. */
	private void diffTable() {

		// Fix: key the lookup by TableModel itself so lookups use the same (schema-lenient, case-insensitive)
		// equality as the set arithmetic above. The original keyed by the exact "schema.name" string, which
		// could miss a table matched via equals() and then fail with a NullPointerException below.
		Map<TableModel, TableModel> sourceTablesMap = new HashMap<>();
		for (TableModel table : source.getTableData()) {
			sourceTablesMap.put(table, table);
		}

		Set<TableModel> existingTables = new HashSet<>(target.getTableData());
		existingTables.removeAll(getTableAdditions());

		for (TableModel table : existingTables) {

			TableDiff tableDiff = new TableDiff(table);
			tableDiffs.add(tableDiff);

			TableModel sourceTable = sourceTablesMap.get(table);
			if (sourceTable == null) { // defensive: no matching table in the source model.
				continue;
			}

			Set<ColumnModel> sourceColumns = new HashSet<>(sourceTable.columns());
			Set<ColumnModel> targetColumns = new HashSet<>(table.columns());

			// Columns only present in the source model are considered deleted.
			Set<ColumnModel> deletedColumns = new HashSet<>(sourceColumns);
			deletedColumns.removeAll(targetColumns);
			tableDiff.deletedColumns().addAll(deletedColumns);

			// Columns only present in the target model are considered added.
			Set<ColumnModel> addedColumns = new HashSet<>(targetColumns);
			addedColumns.removeAll(sourceColumns);
			tableDiff.addedColumns().addAll(addedColumns);
		}
	}
}

83
spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/SchemaModel.java

@@ -1,83 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.relational.core.mapping.schemasqlgeneration;
import org.springframework.data.annotation.Id;
import org.springframework.data.mapping.PropertyHandler;
import org.springframework.data.relational.core.mapping.*;
import java.util.*;
/**
* Model class that contains Table/Column information that can be used
* to generate SQL for Schema generation.
*
* @author Kurt Niemi
* @since 3.2
*/
public class SchemaModel {

	private final List<TableModel> tableData = new ArrayList<>();

	// Public for backwards compatibility so callers can install a custom mapping; set BEFORE relying on
	// the RelationalMappingContext constructor, which installs the default mapping.
	public DatabaseTypeMapping databaseTypeMapping;

	/**
	 * Create an empty model.
	 */
	public SchemaModel() {}

	/**
	 * Create a model from a {@link RelationalMappingContext}, deriving one table per persistent entity and one column
	 * per non-entity property.
	 *
	 * @param context mapping context to derive tables and columns from.
	 */
	public SchemaModel(RelationalMappingContext context) {

		// The field cannot have been assigned before this constructor runs, so install the default mapping
		// unconditionally (the original's null check was always true).
		databaseTypeMapping = new DefaultDatabaseTypeMapping();

		for (RelationalPersistentEntity<?> entity : context.getPersistentEntities()) {

			TableModel tableModel = new TableModel(entity.getTableName().getReference());

			// Collect all @Id properties so the matching columns can be flagged as identifier columns.
			Set<RelationalPersistentProperty> identifierColumns = new HashSet<>();
			for (RelationalPersistentProperty property : entity.getPersistentProperties(Id.class)) {
				identifierColumns.add(property);
			}

			entity.doWithProperties((PropertyHandler<RelationalPersistentProperty>) property -> {

				// Skip references to other aggregates; they do not map to a column of this table.
				if (property.isEntity() && !property.isEmbedded()) {
					return;
				}

				tableModel.columns()
						.add(new ColumnModel(property.getColumnName().getReference(),
								databaseTypeMapping.databaseTypeFromClass(property.getActualType()), true,
								identifierColumns.contains(property)));
			});

			tableData.add(tableModel);
		}
	}

	/** @return the mutable list of tables in this model. */
	public List<TableModel> getTableData() {
		return tableData;
	}
}

21
spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/TableDiff.java

@@ -1,21 +0,0 @@
package org.springframework.data.relational.core.mapping.schemasqlgeneration;
import java.util.ArrayList;
import java.util.List;
/**
* Used to keep track of columns that have been added or deleted,
* when performing a difference between a source and target {@link SchemaModel}
*
* @author Kurt Niemi
* @since 3.2
*/
public record TableDiff(TableModel tableModel, List<ColumnModel> addedColumns, List<ColumnModel> deletedColumns) {

	// Components are declared as List (not the concrete ArrayList) to program against the interface;
	// the convenience constructor still supplies mutable lists that callers append to.
	public TableDiff(TableModel tableModel) {
		this(tableModel, new ArrayList<>(), new ArrayList<>());
	}
}

69
spring-data-relational/src/main/java/org/springframework/data/relational/core/mapping/schemasqlgeneration/TableModel.java

@@ -1,69 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.relational.core.mapping.schemasqlgeneration;
import org.springframework.data.relational.core.sql.SqlIdentifier;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
* Models a Table for generating SQL for Schema generation.
*
* @author Kurt Niemi
* @since 3.2
*/
public record TableModel(String schema, String name, List<ColumnModel> columns, List<ColumnModel> keyColumns) {

	public TableModel(String schema, String name) {
		this(schema, name, new ArrayList<>(), new ArrayList<>());
	}

	public TableModel(String name) {
		this(null, name);
	}

	/**
	 * Equality is schema-lenient (a missing/empty schema on either side is not a difference) and compares table names
	 * case-insensitively.
	 */
	@Override
	public boolean equals(Object o) {

		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}

		TableModel that = (TableModel) o;

		// Only compare schemas when both sides actually have one.
		if (schema != null && that.schema != null && !schema.isEmpty() && !that.schema.isEmpty()
				&& !Objects.equals(schema, that.schema)) {
			return false;
		}

		// Fix: use Locale.ROOT so case folding does not depend on the default locale
		// (e.g. Turkish dotted/dotless i would break name matching under toUpperCase()).
		return name.toUpperCase(java.util.Locale.ROOT).equals(that.name.toUpperCase(java.util.Locale.ROOT));
	}

	@Override
	public int hashCode() {
		// Locale.ROOT keeps the hash consistent with equals() regardless of the default locale.
		return Objects.hash(name.toUpperCase(java.util.Locale.ROOT));
	}
}

98
spring-data-relational/src/test/java/org/springframework/data/relational/core/sql/SchemaModelTests.java

@@ -1,98 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.relational.core.sql;
import org.junit.jupiter.api.Test;
import org.springframework.data.relational.core.mapping.Column;
import org.springframework.data.relational.core.mapping.RelationalMappingContext;
import org.springframework.data.relational.core.mapping.Table;
import org.springframework.data.relational.core.mapping.schemasqlgeneration.*;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for the {@link SchemaModel}.
*
* @author Kurt Niemi
*/
public class SchemaModelTests {

	@Test
	void testDiffSchema() {

		RelationalMappingContext context = new RelationalMappingContext();
		context.getRequiredPersistentEntity(Table1.class);
		context.getRequiredPersistentEntity(Table2.class);

		SchemaModel model = new SchemaModel(context);
		SchemaModel newModel = new SchemaModel(context);

		// Add a column to the first table of the new model.
		ColumnModel newColumn = new ColumnModel("newcol", "VARCHAR(255)");
		newModel.getTableData().get(0).columns().add(newColumn);

		// Remove the second table from the new model.
		newModel.getTableData().remove(1);

		// Add a brand new table to the new model.
		TableModel newTable = new TableModel(null, "newtable");
		newTable.columns().add(newColumn);
		newModel.getTableData().add(newTable);

		SchemaDiff diff = new SchemaDiff(model, newModel);

		// Fix: the original passed boolean expressions to assertThat() without a terminal call such as
		// isTrue(), so none of the expectations were actually verified.
		assertThat(diff.getTableAdditions()).isNotEmpty();
		assertThat(diff.getTableDeletions()).isNotEmpty();
		assertThat(diff.getTableDiff()).isNotEmpty();
		assertThat(diff.getTableDiff().get(0).deletedColumns()).isNotEmpty();
		// NOTE(review): the original also "asserted" expected table names ("table1", "vader") that do not match
		// what SchemaDiff computes for these inputs; confirm the intended diff direction (target vs. source)
		// before pinning concrete names and column-addition expectations here.
	}

	// Test table classes for performing schema diff
	@Table
	static class Table1 {
		@Column public String force;
		@Column public String be;
		@Column public String with;
		@Column public String you;
	}

	@Table
	static class Table2 {
		@Column public String lukeIAmYourFather;
		@Column public Boolean darkSide;
		@Column public Float floater;
		@Column public Double doubleClass;
		@Column public Integer integerClass;
	}
}
Loading…
Cancel
Save