We now support schema creation and schema migration by generating Liquibase changesets from mapped entities. We also support schema evolution by comparing existing tables with mapped entities to compute differential changesets. Closes #756. Original pull request: #1520
14 changed files with 886 additions and 74 deletions
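For orientation, the sketch below shows how the classes added in this changeset might be wired together to write a changeset for an empty database. It is a minimal sketch, not part of the pull request: the Person entity and the changelog path are placeholders, everything else uses the new types shown in the diffs that follow.

import org.springframework.core.io.FileSystemResource;
import org.springframework.data.relational.core.mapping.RelationalMappingContext;
import org.springframework.data.relational.core.mapping.schemasqlgeneration.LiquibaseChangeSetGenerator;
import org.springframework.data.relational.core.mapping.schemasqlgeneration.SchemaModel;

class ChangeSetExample {

	void writeInitialChangeSet() throws Exception {

		// Register the mapped entities the application expects to exist (Person is a placeholder entity class).
		RelationalMappingContext context = new RelationalMappingContext();
		context.getRequiredPersistentEntity(Person.class);

		// Build the code-side model and serialize a changeset that would create the matching tables.
		SchemaModel sourceModel = new SchemaModel(context);
		LiquibaseChangeSetGenerator generator = new LiquibaseChangeSetGenerator(sourceModel);
		generator.generateLiquibaseChangeset(new FileSystemResource("db/changelog/changelog.yml"));
	}
}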
@@ -0,0 +1,47 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.relational.core.mapping.schemasqlgeneration;

import org.springframework.data.relational.core.sql.SqlIdentifier;

import java.util.Objects;

/**
 * Models a Column for generating SQL for Schema generation.
 *
 * @author Kurt Niemi
 * @since 3.2
 */
public record ColumnModel(String name, String type, boolean nullable, boolean identityColumn) {

	public ColumnModel(String name, String type) {
		this(name, type, false, false);
	}

	@Override
	public boolean equals(Object o) {

		if (this == o) return true;
		if (o == null || getClass() != o.getClass()) return false;

		ColumnModel that = (ColumnModel) o;
		return Objects.equals(name, that.name);
	}

	@Override
	public int hashCode() {
		return Objects.hash(name);
	}
}
@@ -0,0 +1,29 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.relational.core.mapping.schemasqlgeneration;

/**
 * Interface for mapping a Java type to a database type.
 *
 * To customize the mapping, set an instance of a class implementing the {@link DatabaseTypeMapping}
 * interface on the {@link SchemaModel} class.
 *
 * @author Kurt Niemi
 * @since 3.2
 */
public interface DatabaseTypeMapping {

	String databaseTypeFromClass(Class<?> type);
}
@@ -0,0 +1,47 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.relational.core.mapping.schemasqlgeneration;

import java.util.HashMap;

/**
 * Class that provides a default implementation for mapping a Java type to a database type.
 *
 * To customize the mapping, set an instance of a class implementing the {@link DatabaseTypeMapping}
 * interface on the {@link SchemaModel} class.
 *
 * @author Kurt Niemi
 * @since 3.2
 */
public class DefaultDatabaseTypeMapping implements DatabaseTypeMapping {

	final HashMap<Class<?>, String> mapClassToDatabaseType = new HashMap<>();

	public DefaultDatabaseTypeMapping() {

		mapClassToDatabaseType.put(String.class, "VARCHAR(255 BYTE)");
		mapClassToDatabaseType.put(Boolean.class, "TINYINT");
		mapClassToDatabaseType.put(Double.class, "DOUBLE");
		mapClassToDatabaseType.put(Float.class, "FLOAT");
		mapClassToDatabaseType.put(Integer.class, "INT");
		mapClassToDatabaseType.put(Long.class, "BIGINT");
	}

	@Override
	public String databaseTypeFromClass(Class<?> type) {
		return mapClassToDatabaseType.get(type);
	}
}
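As the Javadoc of the two types above notes, the default mapping can be replaced by supplying your own DatabaseTypeMapping. A minimal sketch, under the assumption that strings should map to TEXT while everything else falls back to the shipped defaults (the class name is made up for illustration):

public class TextFavoringTypeMapping implements DatabaseTypeMapping {

	private final DatabaseTypeMapping defaults = new DefaultDatabaseTypeMapping();

	@Override
	public String databaseTypeFromClass(Class<?> type) {
		// Map String to TEXT, delegate every other type to DefaultDatabaseTypeMapping.
		if (String.class.equals(type)) {
			return "TEXT";
		}
		return defaults.databaseTypeFromClass(type);
	}
}

SchemaModel exposes databaseTypeMapping as a public field, which is where such an instance would be assigned.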
@@ -0,0 +1,351 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.data.relational.core.mapping.schemasqlgeneration;

import liquibase.CatalogAndSchema;
import liquibase.change.AddColumnConfig;
import liquibase.change.ColumnConfig;
import liquibase.change.ConstraintsConfig;
import liquibase.change.core.AddColumnChange;
import liquibase.change.core.CreateTableChange;
import liquibase.change.core.DropColumnChange;
import liquibase.change.core.DropTableChange;
import liquibase.changelog.ChangeLogChild;
import liquibase.changelog.ChangeLogParameters;
import liquibase.changelog.ChangeSet;
import liquibase.changelog.DatabaseChangeLog;
import liquibase.database.Database;
import liquibase.exception.ChangeLogParseException;
import liquibase.exception.DatabaseException;
import liquibase.parser.core.yaml.YamlChangeLogParser;
import liquibase.resource.DirectoryResourceAccessor;
import liquibase.serializer.ChangeLogSerializer;
import liquibase.serializer.core.yaml.YamlChangeLogSerializer;
import liquibase.snapshot.DatabaseSnapshot;
import liquibase.snapshot.InvalidExampleException;
import liquibase.snapshot.SnapshotControl;
import liquibase.snapshot.SnapshotGeneratorFactory;
import liquibase.structure.core.Column;
import liquibase.structure.core.Table;
import org.springframework.core.io.Resource;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.function.Predicate;

/**
 * Use this class to generate Liquibase change sets.
 *
 * First create a {@link SchemaModel} instance, passing in a RelationalMappingContext, to obtain
 * a model that represents the Table(s)/Column(s) that the code expects to exist.
 *
 * Then optionally create a Liquibase database object that points to an existing database
 * if you want to create a changeset that can be applied to that database.
 *
 * If a Liquibase database object is not used, the generated change set can be applied
 * to an empty database to make it match the state of the code.
 *
 * Prior to applying the changeset you should review it and make adjustments as appropriate.
 *
 * @author Kurt Niemi
 * @since 3.2
 */
public class LiquibaseChangeSetGenerator {

	private final SchemaModel sourceModel;
	private final Database targetDatabase;

	/**
	 * If there are ever additional internal Liquibase tables that must not be dropped, this
	 * predicate should be modified.
	 */
	private final Predicate<String> liquibaseTables = table -> table.startsWith("DATABASECHANGELOG");

	/**
	 * By default existing tables in the target database are never deleted.
	 */
	public Predicate<String> userApplicationTables = table -> true;

	/**
	 * By default existing columns in the target database are never deleted.
	 * Columns are passed into the predicate in the format TableName.ColumnName.
	 */
	public Predicate<String> userApplicationTableColumns = table -> true;

	/**
	 * Use this to generate a ChangeSet that can be applied to an empty database.
	 *
	 * @author Kurt Niemi
	 * @since 3.2
	 *
	 * @param sourceModel model representing the table(s)/column(s) as existing in code
	 */
	public LiquibaseChangeSetGenerator(SchemaModel sourceModel) {

		this.sourceModel = sourceModel;
		this.targetDatabase = null;
	}

	/**
	 * Use this to generate a ChangeSet against an existing database.
	 *
	 * @author Kurt Niemi
	 * @since 3.2
	 *
	 * @param sourceModel model representing the table(s)/column(s) as existing in code
	 * @param targetDatabase existing Liquibase database
	 */
	public LiquibaseChangeSetGenerator(SchemaModel sourceModel, Database targetDatabase) {

		this.sourceModel = sourceModel;
		this.targetDatabase = targetDatabase;
	}

	/**
	 * Generates a Liquibase changeset.
	 *
	 * @author Kurt Niemi
	 * @since 3.2
	 *
	 * @param changeLogResource resource that the changeset will be written to (or appended to, for an existing ChangeSet file)
	 * @throws InvalidExampleException
	 * @throws DatabaseException
	 * @throws IOException
	 * @throws ChangeLogParseException
	 */
	public void generateLiquibaseChangeset(Resource changeLogResource) throws InvalidExampleException, DatabaseException, IOException, ChangeLogParseException {

		String changeSetId = Long.toString(System.currentTimeMillis());
		generateLiquibaseChangeset(changeLogResource, changeSetId, "Spring Data JDBC");
	}

	/**
	 * Generates a Liquibase changeset.
	 *
	 * @author Kurt Niemi
	 * @since 3.2
	 *
	 * @param changeLogResource resource that the changeset will be written to (or appended to, for an existing ChangeSet file)
	 * @param changeSetId a unique value to identify the changeset
	 * @param changeSetAuthor author information to be written to the changeset file
	 * @throws InvalidExampleException
	 * @throws DatabaseException
	 * @throws IOException
	 * @throws ChangeLogParseException
	 */
	public void generateLiquibaseChangeset(Resource changeLogResource, String changeSetId, String changeSetAuthor) throws InvalidExampleException, DatabaseException, IOException, ChangeLogParseException {

		SchemaDiff difference;

		if (targetDatabase != null) {
			SchemaModel liquibaseModel = getLiquibaseModel();
			difference = new SchemaDiff(sourceModel, liquibaseModel);
		} else {
			difference = new SchemaDiff(sourceModel, new SchemaModel());
		}

		DatabaseChangeLog databaseChangeLog = getDatabaseChangeLog(changeLogResource.getFile());

		ChangeSet changeSet = new ChangeSet(changeSetId, changeSetAuthor, false, false, "", "", "", databaseChangeLog);

		generateTableAdditionsDeletions(changeSet, difference);
		generateTableModifications(changeSet, difference);

		writeChangeSet(databaseChangeLog, changeSet, changeLogResource.getFile());
	}

	private void generateTableAdditionsDeletions(ChangeSet changeSet, SchemaDiff difference) {

		for (TableModel table : difference.getTableAdditions()) {
			CreateTableChange newTable = createAddTableChange(table);
			changeSet.addChange(newTable);
		}

		for (TableModel table : difference.getTableDeletions()) {
			// Do not delete/drop the table if it is an external application table
			if (!userApplicationTables.test(table.name())) {
				DropTableChange dropTable = createDropTableChange(table);
				changeSet.addChange(dropTable);
			}
		}
	}

	private void generateTableModifications(ChangeSet changeSet, SchemaDiff difference) {

		for (TableDiff table : difference.getTableDiff()) {

			if (table.addedColumns().size() > 0) {
				AddColumnChange addColumnChange = new AddColumnChange();
				addColumnChange.setSchemaName(table.tableModel().schema());
				addColumnChange.setTableName(table.tableModel().name());

				for (ColumnModel column : table.addedColumns()) {
					AddColumnConfig addColumn = createAddColumnChange(column);
					addColumnChange.addColumn(addColumn);
				}

				changeSet.addChange(addColumnChange);
			}

			ArrayList<ColumnModel> deletedColumns = new ArrayList<>();
			for (ColumnModel columnModel : table.deletedColumns()) {
				String fullName = table.tableModel().name() + "." + columnModel.name();

				if (!userApplicationTableColumns.test(fullName)) {
					deletedColumns.add(columnModel);
				}
			}

			if (deletedColumns.size() > 0) {
				DropColumnChange dropColumnChange = new DropColumnChange();
				dropColumnChange.setSchemaName(table.tableModel().schema());
				dropColumnChange.setTableName(table.tableModel().name());

				List<ColumnConfig> dropColumns = new ArrayList<>();
				for (ColumnModel column : table.deletedColumns()) {
					ColumnConfig config = new ColumnConfig();
					config.setName(column.name());
					dropColumns.add(config);
				}
				dropColumnChange.setColumns(dropColumns);
				changeSet.addChange(dropColumnChange);
			}
		}
	}

	private DatabaseChangeLog getDatabaseChangeLog(File changeLogFile) {

		DatabaseChangeLog databaseChangeLog;

		try {
			YamlChangeLogParser parser = new YamlChangeLogParser();
			File parentDirectory = changeLogFile.getParentFile();
			if (parentDirectory == null) {
				parentDirectory = new File("./");
			}
			DirectoryResourceAccessor resourceAccessor = new DirectoryResourceAccessor(parentDirectory);
			ChangeLogParameters parameters = new ChangeLogParameters();
			databaseChangeLog = parser.parse(changeLogFile.getName(), parameters, resourceAccessor);
		} catch (Exception ex) {
			databaseChangeLog = new DatabaseChangeLog(changeLogFile.getAbsolutePath());
		}
		return databaseChangeLog;
	}

	private void writeChangeSet(DatabaseChangeLog databaseChangeLog, ChangeSet changeSet, File changeLogFile) throws FileNotFoundException, IOException {

		ChangeLogSerializer serializer = new YamlChangeLogSerializer();
		List<ChangeLogChild> changes = new ArrayList<>();
		for (ChangeSet change : databaseChangeLog.getChangeSets()) {
			changes.add(change);
		}
		changes.add(changeSet);
		try (FileOutputStream fos = new FileOutputStream(changeLogFile)) {
			serializer.write(changes, fos);
		}
	}

	private SchemaModel getLiquibaseModel() throws DatabaseException, InvalidExampleException {

		SchemaModel liquibaseModel = new SchemaModel();

		CatalogAndSchema[] schemas = new CatalogAndSchema[] { targetDatabase.getDefaultSchema() };
		SnapshotControl snapshotControl = new SnapshotControl(targetDatabase);

		DatabaseSnapshot snapshot = SnapshotGeneratorFactory.getInstance().createSnapshot(schemas, targetDatabase, snapshotControl);
		Set<Table> tables = snapshot.get(liquibase.structure.core.Table.class);

		for (int i = 0; i < sourceModel.getTableData().size(); i++) {
			TableModel currentModel = sourceModel.getTableData().get(i);
			if (currentModel.schema() == null || currentModel.schema().isEmpty()) {
				TableModel newModel = new TableModel(targetDatabase.getDefaultSchema().getCatalogName(),
						currentModel.name(), currentModel.columns(), currentModel.keyColumns());
				sourceModel.getTableData().set(i, newModel);
			}
		}

		for (liquibase.structure.core.Table table : tables) {

			// Exclude internal Liquibase tables from comparison
			if (liquibaseTables.test(table.getName())) {
				continue;
			}

			TableModel tableModel = new TableModel(table.getSchema().getCatalogName(), table.getName());
			liquibaseModel.getTableData().add(tableModel);

			List<Column> columns = table.getColumns();
			for (liquibase.structure.core.Column column : columns) {
				String type = column.getType().toString();
				boolean nullable = column.isNullable();
				ColumnModel columnModel = new ColumnModel(column.getName(), type, nullable, false);
				tableModel.columns().add(columnModel);
			}
		}

		return liquibaseModel;
	}

	private AddColumnConfig createAddColumnChange(ColumnModel column) {

		AddColumnConfig config = new AddColumnConfig();
		config.setName(column.name());
		config.setType(column.type());

		if (column.identityColumn()) {
			config.setAutoIncrement(true);
		}
		return config;
	}

	private CreateTableChange createAddTableChange(TableModel table) {

		CreateTableChange change = new CreateTableChange();
		change.setSchemaName(table.schema());
		change.setTableName(table.name());

		for (ColumnModel column : table.columns()) {
			ColumnConfig columnConfig = new ColumnConfig();
			columnConfig.setName(column.name());
			columnConfig.setType(column.type());

			if (column.identityColumn()) {
				columnConfig.setAutoIncrement(true);
				ConstraintsConfig constraints = new ConstraintsConfig();
				constraints.setPrimaryKey(true);
				columnConfig.setConstraints(constraints);
			}
			change.addColumn(columnConfig);
		}

		return change;
	}

	private DropTableChange createDropTableChange(TableModel table) {

		DropTableChange change = new DropTableChange();
		change.setSchemaName(table.schema());
		change.setTableName(table.name());
		change.setCascadeConstraints(true);

		return change;
	}
}
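The class Javadoc above also covers the differential case: point the generator at a live database through a Liquibase Database object so that only the changes needed to bring it in line with the mapped entities are emitted. A sketch of that wiring, assuming a javax.sql.DataSource for the target database and a sourceModel built as in the earlier example; the changeset id and author values are illustrative:

import java.sql.Connection;
import javax.sql.DataSource;

import liquibase.database.Database;
import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection;
import org.springframework.core.io.FileSystemResource;
import org.springframework.data.relational.core.mapping.schemasqlgeneration.LiquibaseChangeSetGenerator;
import org.springframework.data.relational.core.mapping.schemasqlgeneration.SchemaModel;

class DifferentialChangeSetExample {

	void writeDiffChangeSet(DataSource dataSource, SchemaModel sourceModel) throws Exception {

		try (Connection connection = dataSource.getConnection()) {

			// Wrap the JDBC connection so Liquibase can snapshot the existing schema.
			Database targetDatabase = DatabaseFactory.getInstance()
					.findCorrectDatabaseImplementation(new JdbcConnection(connection));

			// Only the differences between the mapped entities and the live tables end up in the changeset.
			LiquibaseChangeSetGenerator generator = new LiquibaseChangeSetGenerator(sourceModel, targetDatabase);
			generator.generateLiquibaseChangeset(new FileSystemResource("db/changelog/changelog.yml"),
					"person-schema-update", "Spring Data JDBC");
		}
	}
}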
@@ -0,0 +1,99 @@
package org.springframework.data.relational.core.mapping.schemasqlgeneration;

import java.util.*;

/**
 * This class returns the difference between a source and target {@link SchemaModel}.
 *
 * The difference consists of table additions, deletions, and modified tables (i.e. the table
 * exists in both source and target, but has columns to add or delete).
 *
 * @author Kurt Niemi
 * @since 3.2
 */
public class SchemaDiff {

	private final List<TableModel> tableAdditions = new ArrayList<>();
	private final List<TableModel> tableDeletions = new ArrayList<>();
	private final List<TableDiff> tableDiffs = new ArrayList<>();

	private final SchemaModel source;
	private final SchemaModel target;

	/**
	 * Compare two {@link SchemaModel} instances to identify differences.
	 *
	 * @param target model reflecting the current database state
	 * @param source model reflecting the desired database state
	 */
	public SchemaDiff(SchemaModel target, SchemaModel source) {

		this.source = source;
		this.target = target;

		diffTableAdditionDeletion();
		diffTable();
	}

	public List<TableModel> getTableAdditions() {
		return tableAdditions;
	}

	public List<TableModel> getTableDeletions() {
		return tableDeletions;
	}

	public List<TableDiff> getTableDiff() {
		return tableDiffs;
	}

	private void diffTableAdditionDeletion() {

		Set<TableModel> sourceTableData = new HashSet<>(source.getTableData());
		Set<TableModel> targetTableData = new HashSet<>(target.getTableData());

		// Identify deleted tables
		Set<TableModel> deletedTables = new HashSet<>(sourceTableData);
		deletedTables.removeAll(targetTableData);
		tableDeletions.addAll(deletedTables);

		// Identify added tables
		Set<TableModel> addedTables = new HashSet<>(targetTableData);
		addedTables.removeAll(sourceTableData);
		tableAdditions.addAll(addedTables);
	}

	private void diffTable() {

		HashMap<String, TableModel> sourceTablesMap = new HashMap<>();
		for (TableModel table : source.getTableData()) {
			sourceTablesMap.put(table.schema() + "." + table.name(), table);
		}

		Set<TableModel> existingTables = new HashSet<>(target.getTableData());
		existingTables.removeAll(getTableAdditions());

		for (TableModel table : existingTables) {
			TableDiff tableDiff = new TableDiff(table);
			tableDiffs.add(tableDiff);

			TableModel sourceTable = sourceTablesMap.get(table.schema() + "." + table.name());

			Set<ColumnModel> sourceTableData = new HashSet<>(sourceTable.columns());
			Set<ColumnModel> targetTableData = new HashSet<>(table.columns());

			// Identify deleted columns
			Set<ColumnModel> deletedColumns = new HashSet<>(sourceTableData);
			deletedColumns.removeAll(targetTableData);

			tableDiff.deletedColumns().addAll(deletedColumns);

			// Identify added columns
			Set<ColumnModel> addedColumns = new HashSet<>(targetTableData);
			addedColumns.removeAll(sourceTableData);
			tableDiff.addedColumns().addAll(addedColumns);
		}
	}
}
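To illustrate the orientation of the two arguments (the generator above calls new SchemaDiff(sourceModel, liquibaseModel), i.e. the code-side model first and the database-side model second), here is a small sketch; codeModel and databaseModel are assumed to be SchemaModel instances built from the mapping context and from a database snapshot respectively:

void printDiff(SchemaModel codeModel, SchemaModel databaseModel) {

	SchemaDiff diff = new SchemaDiff(codeModel, databaseModel);

	// Tables present in the code model but not in the database: candidates for CREATE TABLE.
	for (TableModel table : diff.getTableAdditions()) {
		System.out.println("create table " + table.name());
	}

	// Tables present in both models: inspect column-level changes.
	for (TableDiff tableDiff : diff.getTableDiff()) {
		tableDiff.addedColumns().forEach(column -> System.out.println("add column " + column.name()));
		tableDiff.deletedColumns().forEach(column -> System.out.println("drop column " + column.name()));
	}
}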
@@ -0,0 +1,83 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.relational.core.mapping.schemasqlgeneration;

import org.springframework.data.annotation.Id;
import org.springframework.data.mapping.PropertyHandler;
import org.springframework.data.relational.core.mapping.*;

import java.util.*;

/**
 * Model class that contains Table/Column information that can be used
 * to generate SQL for Schema generation.
 *
 * @author Kurt Niemi
 * @since 3.2
 */
public class SchemaModel {

	private final List<TableModel> tableData = new ArrayList<TableModel>();
	public DatabaseTypeMapping databaseTypeMapping;

	/**
	 * Create empty model
	 */
	public SchemaModel() {

	}

	/**
	 * Create model from a RelationalMappingContext
	 */
	public SchemaModel(RelationalMappingContext context) {

		if (databaseTypeMapping == null) {
			databaseTypeMapping = new DefaultDatabaseTypeMapping();
		}

		for (RelationalPersistentEntity entity : context.getPersistentEntities()) {
			TableModel tableModel = new TableModel(entity.getTableName().getReference());

			Iterator<BasicRelationalPersistentProperty> iter =
					entity.getPersistentProperties(Id.class).iterator();
			Set<BasicRelationalPersistentProperty> setIdentifierColumns = new HashSet<BasicRelationalPersistentProperty>();
			while (iter.hasNext()) {
				BasicRelationalPersistentProperty p = iter.next();
				setIdentifierColumns.add(p);
			}

			entity.doWithProperties((PropertyHandler) handler -> {
				BasicRelationalPersistentProperty property = (BasicRelationalPersistentProperty) handler;

				if (property.isEntity() && !property.isEmbedded()) {
					return;
				}

				ColumnModel columnModel = new ColumnModel(property.getColumnName().getReference(),
						databaseTypeMapping.databaseTypeFromClass(property.getActualType()),
						true, setIdentifierColumns.contains(property));
				tableModel.columns().add(columnModel);
			});

			tableData.add(tableModel);
		}
	}

	public List<TableModel> getTableData() {
		return tableData;
	}
}
@@ -0,0 +1,21 @@
package org.springframework.data.relational.core.mapping.schemasqlgeneration;

import java.util.ArrayList;
import java.util.List;

/**
 * Used to keep track of columns that have been added or deleted
 * when computing the difference between a source and target {@link SchemaModel}.
 *
 * @author Kurt Niemi
 * @since 3.2
 */
public record TableDiff(TableModel tableModel,
		ArrayList<ColumnModel> addedColumns,
		ArrayList<ColumnModel> deletedColumns) {

	public TableDiff(TableModel tableModel) {
		this(tableModel, new ArrayList<>(), new ArrayList<>());
	}
}
@@ -0,0 +1,69 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.relational.core.mapping.schemasqlgeneration;

import org.springframework.data.relational.core.sql.SqlIdentifier;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

/**
 * Models a Table for generating SQL for Schema generation.
 *
 * @author Kurt Niemi
 * @since 3.2
 */
public record TableModel(String schema, String name, List<ColumnModel> columns, List<ColumnModel> keyColumns) {

	public TableModel(String schema, String name) {
		this(schema, name, new ArrayList<>(), new ArrayList<>());
	}

	public TableModel(String name) {
		this(null, name);
	}

	@Override
	public boolean equals(Object o) {

		if (this == o) {
			return true;
		}

		if (o == null || getClass() != o.getClass()) {
			return false;
		}

		TableModel that = (TableModel) o;

		// If we are missing the schema for either TableModel we will not treat that as being different
		if (schema != null && that.schema != null && !schema.isEmpty() && !that.schema.isEmpty()) {
			if (!Objects.equals(schema, that.schema)) {
				return false;
			}
		}
		if (!name.toUpperCase().equals(that.name.toUpperCase())) {
			return false;
		}
		return true;
	}

	@Override
	public int hashCode() {
		return Objects.hash(name.toUpperCase());
	}
}
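The equals() contract above compares table names case-insensitively and ignores the schema whenever it is absent on either side, which keeps schema-less code-declared tables comparable with snapshot tables. Two illustrative checks, consistent with the record as written:

// true: names differ only in case and one side has no schema, so the schema is ignored
boolean same = new TableModel(null, "CUSTOMER").equals(new TableModel("PUBLIC", "customer"));

// false: both sides carry a schema and the schemas differ
boolean different = new TableModel("PUBLIC", "customer").equals(new TableModel("SALES", "customer"));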
@@ -0,0 +1,98 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.relational.core.sql;

import org.junit.jupiter.api.Test;
import org.springframework.data.relational.core.mapping.Column;
import org.springframework.data.relational.core.mapping.RelationalMappingContext;
import org.springframework.data.relational.core.mapping.Table;
import org.springframework.data.relational.core.mapping.schemasqlgeneration.*;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Unit tests for the {@link SchemaModel}.
 *
 * @author Kurt Niemi
 */
public class SchemaModelTests {

	@Test
	void testDiffSchema() {

		RelationalMappingContext context = new RelationalMappingContext();
		context.getRequiredPersistentEntity(SchemaModelTests.Table1.class);
		context.getRequiredPersistentEntity(SchemaModelTests.Table2.class);

		SchemaModel model = new SchemaModel(context);

		SchemaModel newModel = new SchemaModel(context);

		// Add column to table
		ColumnModel newColumn = new ColumnModel("newcol", "VARCHAR(255)");
		newModel.getTableData().get(0).columns().add(newColumn);

		// Remove table
		newModel.getTableData().remove(1);

		// Add new table
		TableModel newTable = new TableModel(null, "newtable");
		newTable.columns().add(newColumn);
		newModel.getTableData().add(newTable);

		SchemaDiff diff = new SchemaDiff(model, newModel);

		// Verify that newtable is an added table in the diff
		assertThat(diff.getTableAdditions().size() > 0);
		assertThat(diff.getTableAdditions().get(0).name().equals("table1"));

		assertThat(diff.getTableDeletions().size() > 0);
		assertThat(diff.getTableDeletions().get(0).name().equals("vader"));

		assertThat(diff.getTableDiff().size() > 0);
		assertThat(diff.getTableDiff().get(0).addedColumns().size() > 0);
		assertThat(diff.getTableDiff().get(0).deletedColumns().size() > 0);
	}

	// Test table classes for performing schema diff
	@Table
	static class Table1 {
		@Column
		public String force;
		@Column
		public String be;
		@Column
		public String with;
		@Column
		public String you;
	}

	@Table
	static class Table2 {
		@Column
		public String lukeIAmYourFather;
		@Column
		public Boolean darkSide;
		@Column
		public Float floater;
		@Column
		public Double doubleClass;
		@Column
		public Integer integerClass;
	}
}