Browse Source

#413 - Refine reference documentation after Spring R2DBC migration.

pull/1188/head
Mark Paluch 5 years ago
parent
commit
ce57b25cd0
No known key found for this signature in database
GPG Key ID: 51A00FA751B91849
  1. 6
      src/main/asciidoc/index.adoc
  2. 72
      src/main/asciidoc/reference/r2dbc-connections.adoc
  3. 92
      src/main/asciidoc/reference/r2dbc-core.adoc
  4. 144
      src/main/asciidoc/reference/r2dbc-databaseclient.adoc
  5. 270
      src/main/asciidoc/reference/r2dbc-fluent.adoc
  6. 102
      src/main/asciidoc/reference/r2dbc-initialization.adoc
  7. 63
      src/main/asciidoc/reference/r2dbc-repositories.adoc
  8. 208
      src/main/asciidoc/reference/r2dbc-sql.adoc
  9. 194
      src/main/asciidoc/reference/r2dbc-template.adoc
  10. 86
      src/main/asciidoc/reference/r2dbc-transactions.adoc
  11. 8
      src/main/asciidoc/reference/r2dbc.adoc
  12. 48
      src/test/java/org/springframework/data/r2dbc/documentation/Person.java
  13. 39
      src/test/java/org/springframework/data/r2dbc/documentation/PersonRepository.java
  14. 53
      src/test/java/org/springframework/data/r2dbc/documentation/PersonRepositoryTests.java
  15. 60
      src/test/java/org/springframework/data/r2dbc/documentation/R2dbcApp.java
  16. 109
      src/test/java/org/springframework/data/r2dbc/documentation/R2dbcEntityTemplateSnippets.java

6
src/main/asciidoc/index.adoc

@ -7,6 +7,8 @@ ifdef::backend-epub3[:front-cover-image: image:epub-cover.png[Front Cover,1050,1 @@ -7,6 +7,8 @@ ifdef::backend-epub3[:front-cover-image: image:epub-cover.png[Front Cover,1050,1
:spring-data-r2dbc-javadoc: https://docs.spring.io/spring-data/r2dbc/docs/{version}/api
:spring-framework-ref: https://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference
:reactiveStreamsJavadoc: https://www.reactive-streams.org/reactive-streams-{reactiveStreamsVersion}-javadoc
:example-root: ../../../src/test/java/org/springframework/data/r2dbc/documentation
:tabsize: 2
(C) 2018-2020 The original authors.
@ -38,10 +40,6 @@ include::{spring-data-commons-docs}/auditing.adoc[leveloffset=+1] @@ -38,10 +40,6 @@ include::{spring-data-commons-docs}/auditing.adoc[leveloffset=+1]
include::reference/r2dbc-auditing.adoc[leveloffset=+1]
include::reference/r2dbc-connections.adoc[leveloffset=+1]
include::reference/r2dbc-initialization.adoc[leveloffset=+1]
include::reference/mapping.adoc[leveloffset=+1]
include::reference/kotlin.adoc[leveloffset=+1]

72
src/main/asciidoc/reference/r2dbc-connections.adoc

@ -1,72 +0,0 @@ @@ -1,72 +0,0 @@
[[r2dbc.connections]]
= Controlling Database Connections
This section covers:
* <<r2dbc.connections.connectionfactory>>
* <<r2dbc.connections.ConnectionFactoryUtils>>
* <<r2dbc.connections.SmartConnectionFactory>>
* <<r2dbc.connections.TransactionAwareConnectionFactoryProxy>>
* <<r2dbc.connections.ConnectionFactoryTransactionManager>>
[[r2dbc.connections.connectionfactory]]
== Using `ConnectionFactory`
Spring obtains an R2DBC connection to the database through a `ConnectionFactory`.
A `ConnectionFactory` is part of the R2DBC specification and is a generalized connection factory.
It lets a container or a framework hide connection pooling and transaction management issues from the application code.
As a developer, you need not know details about how to connect to the database.
That is the responsibility of the administrator who sets up the `ConnectionFactory`.
You most likely fill both roles as you develop and test code, but you do not necessarily have to know how the production data source is configured.
When you use Spring's R2DBC layer, you can configure your own with a connection pool implementation provided by a third party.
A popular implementation is R2DBC `Pool`.
Implementations in the Spring distribution are meant only for testing purposes and do not provide pooling.
To configure a `ConnectionFactory`:
. Obtain a connection with `ConnectionFactory` as you typically obtain an R2DBC `ConnectionFactory`.
. Provide an R2DBC URL.
(See the documentation for your driver for the correct value.)
The following example shows how to configure a `ConnectionFactory` in Java:
====
[source,java,indent=0]
[subs="verbatim,quotes"]
----
ConnectionFactory factory = ConnectionFactories.get("r2dbc:h2:mem:///test?options=DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE");
----
====
[[r2dbc.connections.ConnectionFactoryUtils]]
== Using `ConnectionFactoryUtils`
The `ConnectionFactoryUtils` class is a convenient and powerful helper class that provides `static` methods to obtain connections from `ConnectionFactory` and close connections (if necessary).
It supports subscriber ``Context``-bound connections with, for example, `ConnectionFactoryTransactionManager`.
[[r2dbc.connections.SmartConnectionFactory]]
== Implementing `SmartConnectionFactory`
The `SmartConnectionFactory` interface should be implemented by classes that can provide a connection to a relational database.
It extends the `ConnectionFactory` interface to let classes that use it query whether the connection should be closed after a given operation.
This usage is efficient when you know that you need to reuse a connection.
[[r2dbc.connections.TransactionAwareConnectionFactoryProxy]]
== Using `TransactionAwareConnectionFactoryProxy`
`TransactionAwareConnectionFactoryProxy` is a proxy for a target `ConnectionFactory`.
The proxy wraps that target `ConnectionFactory` to add awareness of Spring-managed transactions.
[[r2dbc.connections.ConnectionFactoryTransactionManager]]
== Using `ConnectionFactoryTransactionManager`
The `ConnectionFactoryTransactionManager` class is a `ReactiveTransactionManager` implementation for single R2DBC datasources.
It binds an R2DBC connection from the specified data source to the subscriber `Context`, potentially allowing for one subscriber connection for each data source.
Application code is required to retrieve the R2DBC connection through `ConnectionFactoryUtils.getConnection(ConnectionFactory)`, instead of R2DBC's standard `ConnectionFactory.create()`.
All framework classes (such as `DatabaseClient`) use this strategy implicitly.
If not used with this transaction manager, the lookup strategy behaves exactly like the common one. Thus, it can be used in any case.
The `ConnectionFactoryTransactionManager` class supports custom isolation levels that get applied to the connection.

92
src/main/asciidoc/reference/r2dbc-core.adoc

@ -1,14 +1,13 @@ @@ -1,14 +1,13 @@
R2DBC contains a wide range of features:
* Spring configuration support with Java-based `@Configuration` classes for an R2DBC driver instance.
* A `DatabaseClient` helper class that increases productivity when performing common R2DBC operations with integrated object mapping between rows and POJOs.
* Exception translation into Spring's portable Data Access Exception hierarchy.
* `R2dbcEntityTemplate` as central class for entity-bound operations that increases productivity when performing common R2DBC operations with integrated object mapping between rows and POJOs.
* Feature-rich object mapping integrated with Spring's Conversion Service.
* Annotation-based mapping metadata that is extensible to support other metadata formats.
* Automatic implementation of Repository interfaces, including support for custom query methods.
For most tasks, you should use `DatabaseClient` or the repository support, which both use the rich mapping functionality.
`DatabaseClient` is the place to look for accessing functionality such as ad-hoc CRUD operations.
For most tasks, you should use `R2dbcEntityTemplate` or the repository support, which both use the rich mapping functionality.
`R2dbcEntityTemplate` is the place to look for accessing functionality such as ad-hoc CRUD operations.
[[r2dbc.getting-started]]
== Getting Started
@ -92,37 +91,9 @@ logging.level.org.springframework.data.r2dbc=DEBUG @@ -92,37 +91,9 @@ logging.level.org.springframework.data.r2dbc=DEBUG
Then you can, for example, create a `Person` class to persist, as follows:
====
[source,java]
[source,java,indent=0]
----
package org.spring.r2dbc.example;
public class Person {
private String id;
private String name;
private int age;
public Person(String id, String name, int age) {
this.id = id;
this.name = name;
this.age = age;
}
public String getId() {
return id;
}
public String getName() {
return name;
}
public int getAge() {
return age;
}
@Override
public String toString() {
return "Person [id=" + id + ", name=" + name + ", age=" + age + "]";
}
}
include::../{example-root}/Person.java[tags=class]
----
====
@ -140,48 +111,11 @@ CREATE TABLE person @@ -140,48 +111,11 @@ CREATE TABLE person
You also need a main application to run, as follows:
====
[source,java]
[source,java,indent=0]
----
package org.spring.r2dbc.example;
public class R2dbcApp {
private static final Log log = LogFactory.getLog(R2dbcApp.class);
public static void main(String[] args) throws Exception {
ConnectionFactory connectionFactory = ConnectionFactories.get("r2dbc:h2:mem:///test?options=DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE");
DatabaseClient client = DatabaseClient.create(connectionFactory);
client.execute("CREATE TABLE person" +
"(id VARCHAR(255) PRIMARY KEY," +
"name VARCHAR(255)," +
"age INT)")
.fetch()
.rowsUpdated()
.as(StepVerifier::create)
.expectNextCount(1)
.verifyComplete();
client.insert()
.into(Person.class)
.using(new Person("joe", "Joe", 34))
.then()
.as(StepVerifier::create)
.verifyComplete();
client.select()
.from(Person.class)
.fetch()
.first()
.doOnNext(it -> log.info(it))
.as(StepVerifier::create)
.expectNextCount(1)
.verifyComplete();
}
}
include::../{example-root}/R2dbcApp.java[tags=class]
----
====
@ -190,19 +124,19 @@ When you run the main program, the preceding examples produce output similar to @@ -190,19 +124,19 @@ When you run the main program, the preceding examples produce output similar to
====
[source]
----
2018-11-28 10:47:03,893 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 310 - Executing SQL statement [CREATE TABLE person
2018-11-28 10:47:03,893 DEBUG amework.core.r2dbc.DefaultDatabaseClient: 310 - Executing SQL statement [CREATE TABLE person
(id VARCHAR(255) PRIMARY KEY,
name VARCHAR(255),
age INT)]
2018-11-28 10:47:04,074 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 908 - Executing SQL statement [INSERT INTO person (id, name, age) VALUES($1, $2, $3)]
2018-11-28 10:47:04,092 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 575 - Executing SQL statement [SELECT id, name, age FROM person]
2018-11-28 10:47:04,074 DEBUG amework.core.r2dbc.DefaultDatabaseClient: 908 - Executing SQL statement [INSERT INTO person (id, name, age) VALUES($1, $2, $3)]
2018-11-28 10:47:04,092 DEBUG amework.core.r2dbc.DefaultDatabaseClient: 575 - Executing SQL statement [SELECT id, name, age FROM person]
2018-11-28 10:47:04,436 INFO org.spring.r2dbc.example.R2dbcApp: 43 - Person [id='joe', name='Joe', age=34]
----
====
Even in this simple example, there are a few things to notice:
* You can create an instance of the central helper class in Spring Data R2DBC (<<r2dbc.datbaseclient,`DatabaseClient`>>) by using a standard `io.r2dbc.spi.ConnectionFactory` object.
* You can create an instance of the central helper class in Spring Data R2DBC (`R2dbcEntityTemplate`) by using a standard `io.r2dbc.spi.ConnectionFactory` object.
* The mapper works against standard POJO objects without the need for any additional metadata (though you can, optionally, provide that information -- see <<mapping,here>>.).
* Mapping conventions can use field access. Notice that the `Person` class has only getters.
* If the constructor argument names match the column names of the stored row, they are used to instantiate the object.
@ -232,7 +166,7 @@ public class ApplicationConfiguration extends AbstractR2dbcConfiguration { @@ -232,7 +166,7 @@ public class ApplicationConfiguration extends AbstractR2dbcConfiguration {
@Override
@Bean
public ConnectionFactory connectionFactory() {
return …;
return …
}
}
----

144
src/main/asciidoc/reference/r2dbc-databaseclient.adoc

@ -1,144 +0,0 @@ @@ -1,144 +0,0 @@
[[r2dbc.datbaseclient]]
= Introduction to `DatabaseClient`
Spring Data R2DBC includes a reactive, non-blocking `DatabaseClient` for database interaction.
The client has a functional, fluent API with reactive types for declarative composition.
`DatabaseClient` encapsulates resource handling (such as opening and closing connections) so that your application code can run SQL queries or call higher-level functionality (such as inserting or selecting data).
NOTE: `DatabaseClient` is a recently developed application component that provides a minimal set of convenience methods that is likely to be extended through time.
NOTE: Once configured, `DatabaseClient` is thread-safe and can be reused across multiple instances.
Another central feature of `DatabaseClient` is the translation of exceptions thrown by R2DBC drivers into Spring's portable Data Access Exception hierarchy. See "`<<r2dbc.exception>>`" for more information.
The next section contains an example of how to work with the `DatabaseClient` in the context of the Spring container.
[[r2dbc.datbaseclient.create]]
== Creating a `DatabaseClient` Object
The simplest way to create a `DatabaseClient` object is through a static factory method, as follows:
====
[source,java]
----
DatabaseClient.create(ConnectionFactory connectionFactory)
----
====
The preceding method creates a `DatabaseClient` with default settings.
You can also obtain a `Builder` instance from `DatabaseClient.builder()`.
You can customize the client by calling the following methods:
* `….exceptionTranslator(…)`: Supply a specific `R2dbcExceptionTranslator` to customize how R2DBC exceptions are translated into Spring's portable Data Access Exception hierarchy.
See "`<<r2dbc.exception>>`" for more information.
* `….dataAccessStrategy(…)`: Set the strategy how SQL queries are generated and how objects are mapped.
Once built, a `DatabaseClient` instance is immutable. However, you can clone it and build a modified copy without affecting the original instance, as the following example shows:
====
[source,java]
----
DatabaseClient client1 = DatabaseClient.builder()
.exceptionTranslator(exceptionTranslatorA).build();
DatabaseClient client2 = client1.mutate()
.exceptionTranslator(exceptionTranslatorB).build();
----
====
== Controlling Database Connections
Spring Data R2DBC obtains a connection to the database through a `ConnectionFactory`.
A `ConnectionFactory` is part of the R2DBC specification and is a generalized connection factory.
It lets a container or a framework hide connection pooling and transaction management issues from the application code.
When you use Spring Data R2DBC, you can create a `ConnectionFactory` by using your R2DBC driver.
`ConnectionFactory` implementations can either return the same connection or different connections or provide connection pooling.
`DatabaseClient` uses `ConnectionFactory` to create and release connections for each operation without affinity to a particular connection across multiple operations.
Assuming you use H2 as a database, a typical programmatic setup looks something like the following listing:
====
[source, java]
----
H2ConnectionConfiguration config = … <1>
ConnectionFactory factory = new H2ConnectionFactory(config); <2>
DatabaseClient client = DatabaseClient.create(factory); <3>
----
<1> Prepare the database specific configuration (host, port, credentials etc.)
<2> Create a connection factory using that configuration.
<3> Create a `DatabaseClient` to use that connection factory.
====
[[r2dbc.exception]]
= Exception Translation
The Spring framework provides exception translation for a wide variety of database and mapping technologies.
The Spring support for R2DBC extends this feature by providing implementations of the `R2dbcExceptionTranslator` interface.
`R2dbcExceptionTranslator` is an interface to be implemented by classes that can translate between `R2dbcException` and Spring’s own `org.springframework.dao.DataAccessException`, which is agnostic in regard to data access strategy.
Implementations can be generic (for example, using SQLState codes) or proprietary (for example, using Postgres error codes) for greater precision.
`R2dbcExceptionSubclassTranslator` is the implementation of `R2dbcExceptionTranslator` that is used by default.
It considers R2DBC's categorized exception hierarchy to translate these into Spring's consistent exception hierarchy.
`R2dbcExceptionSubclassTranslator` uses `SqlStateR2dbcExceptionTranslator` as its fallback if it is not able to translate an exception.
`SqlErrorCodeR2dbcExceptionTranslator` uses specific vendor codes by using Spring JDBC's `SQLErrorCodes`.
It is more precise than the `SQLState` implementation.
The error code translations are based on codes held in a JavaBean type class called `SQLErrorCodes`.
Instances of this class are created and populated by an `SQLErrorCodesFactory`, which (as the name suggests) is a factory for creating `SQLErrorCodes` based on the contents of a configuration file named `sql-error-codes.xml` from Spring's Data Access module.
This file is populated with vendor codes and based on the `ConnectionFactoryName` taken from `ConnectionFactoryMetadata`.
The codes for the actual database you are using are used.
The `SqlErrorCodeR2dbcExceptionTranslator` applies matching rules in the following sequence:
. Any custom translation implemented by a subclass.
Normally, the provided concrete `SqlErrorCodeR2dbcExceptionTranslator` is used, so this rule does not apply.
It applies only if you have actually provided a subclass implementation.
. Any custom implementation of the `SQLExceptionTranslator` interface that is provided as the `customSqlExceptionTranslator` property of the `SQLErrorCodes` class.
. Error code matching is applied.
. Use a fallback translator.
NOTE: By default, the `SQLErrorCodesFactory` is used to define error codes and custom exception translations.
They are looked up from a file named `sql-error-codes.xml` (which must be on the classpath), and the matching `SQLErrorCodes` instance is located based on the database name from the database metadata of the database in use.
`SQLErrorCodesFactory` requires Spring JDBC.
You can extend `SqlErrorCodeR2dbcExceptionTranslator`, as the following example shows:
====
[source,java]
----
public class CustomSqlErrorCodeR2dbcExceptionTranslator extends SqlErrorCodeR2dbcExceptionTranslator {
protected DataAccessException customTranslate(String task, String sql, R2dbcException r2dbcex) {
if (r2dbcex.getErrorCode() == -12345) {
return new DeadlockLoserDataAccessException(task, r2dbcex);
}
return null;
}
}
----
====
In the preceding example, the specific error code (`-12345`) is translated, while other errors are left to be translated by the default translator implementation.
To use this custom translator, you must configure `DatabaseClient` through the `exceptionTranslator` builder method, and you must use this `DatabaseClient` for all of the data access processing where this translator is needed.
The following example shows how you can use this custom translator:
====
[source,java]
----
ConnectionFactory connectionFactory = …;
CustomSqlErrorCodeR2dbcExceptionTranslator exceptionTranslator =
new CustomSqlErrorCodeR2dbcExceptionTranslator();
DatabaseClient client = DatabaseClient.builder()
.connectionFactory(connectionFactory)
.exceptionTranslator(exceptionTranslator)
.build();
----
====

270
src/main/asciidoc/reference/r2dbc-fluent.adoc

@ -1,270 +0,0 @@ @@ -1,270 +0,0 @@
[[r2dbc.datbaseclient.fluent-api]]
= Fluent Data Access API
The SQL API of `DatabaseClient` offers you maximum flexibility to run any type of SQL.
`DatabaseClient` provides a more narrow interface for typical ad-hoc use-cases, such as querying, inserting, updating, and deleting data.
The entry points (`insert()`, `select()`, `update()`, and others) follow a natural naming schema based on the operation to be run.
Moving on from the entry point, the API is designed to offer only context-dependent methods that lead to a terminating method that creates and runs a SQL statement.
Spring Data R2DBC uses a `R2dbcDialect` abstraction to determine bind markers, pagination support and the data types natively supported by the underlying driver.
Consider the following simple query:
====
[source,java]
----
Flux<Person> people = databaseClient.select()
.from(Person.class) <1>
.fetch()
.all(); <2>
----
<1> Using `Person` with the `from(…)` method sets the `FROM` table based on mapping metadata.
It also maps tabular results on `Person` result objects.
<2> Fetching `all()` rows returns a `Flux<Person>` without limiting results.
====
The following example declares a more complex query that specifies the table name by name, a `WHERE` condition, and an `ORDER BY` clause:
====
[source,java]
----
Mono<Person> first = databaseClient.select()
.from("legoset") <1>
.matching(where("firstname").is("John") <2>
.and("lastname").in("Doe", "White"))
.orderBy(desc("id")) <3>
.as(Person.class)
.fetch()
.one(); <4>
----
<1> Selecting from a table by name returns row results as `Map<String, Object>` with case-insensitive column name matching.
<2> The issued query declares a `WHERE` condition on `firstname` and `lastname` columns to filter results.
<3> Results can be ordered by individual column names, resulting in an `ORDER BY` clause.
<4> Selecting the one result fetches only a single row.
This way of consuming rows expects the query to return exactly a single result.
`Mono` emits an `IncorrectResultSizeDataAccessException` if the query yields more than a single result.
====
TIP: You can directly apply <<projections,Projections>> to result documents by providing the target type via `as(Class<?>)`.
You can consume Query results in three ways:
* Through object mapping (for example, `as(Class<T>)`) by using Spring Data's mapping-metadata.
* As `Map<String, Object>` where column names are mapped to their value.
Column names are looked up in a case-insensitive way.
* By supplying a mapping `BiFunction` for direct access to R2DBC `Row` and `RowMetadata`.
You can switch between retrieving a single entity and retrieving multiple entities through the following terminating methods:
* `first()`: Consume only the first row, returning a `Mono`.
The returned `Mono` completes without emitting an object if the query returns no results.
* `one()`: Consume exactly one row, returning a `Mono`.
The returned `Mono` completes without emitting an object if the query returns no results.
If the query returns more than one row, `Mono` completes exceptionally emitting `IncorrectResultSizeDataAccessException`.
* `all()`: Consume all returned rows returning a `Flux`.
* `rowsUpdated`: Consume the number of affected rows.
It is typically used with `INSERT`, `UPDATE`, and `DELETE` statements.
[[r2dbc.datbaseclient.fluent-api.select]]
== Selecting Data
You can use the `select()` entry point to express your `SELECT` queries.
The resulting `SELECT` queries support the commonly used clauses (`WHERE` and `ORDER BY`) and support pagination.
The fluent API style lets you chain together multiple methods while having easy-to-understand code.
To improve readability, you can use static imports that let you avoid using the 'new' keyword for creating `Criteria` instances.
[[r2dbc.datbaseclient.fluent-api.criteria]]
==== Methods for the Criteria Class
The `Criteria` class provides the following methods, all of which correspond to SQL operators:
* `Criteria` *and* `(String column)`: Adds a chained `Criteria` with the specified `property` to the current `Criteria` and returns the newly created one.
* `Criteria` *or* `(String column)`: Adds a chained `Criteria` with the specified `property` to the current `Criteria` and returns the newly created one.
* `Criteria` *greaterThan* `(Object o)`: Creates a criterion by using the `>` operator.
* `Criteria` *greaterThanOrEquals* `(Object o)`: Creates a criterion by using the `>=` operator.
* `Criteria` *in* `(Object... o)`: Creates a criterion by using the `IN` operator for a varargs argument.
* `Criteria` *in* `(Collection<?> collection)`: Creates a criterion by using the `IN` operator using a collection.
* `Criteria` *is* `(Object o)`: Creates a criterion by using column matching (`property = value`).
* `Criteria` *isNull* `()`: Creates a criterion by using the `IS NULL` operator.
* `Criteria` *isNotNull* `()`: Creates a criterion by using the `IS NOT NULL` operator.
* `Criteria` *lessThan* `(Object o)`: Creates a criterion by using the `<` operator.
* `Criteria` *lessThanOrEquals* `(Object o)`: Creates a criterion by using the `<=` operator.
* `Criteria` *like* `(Object o)`: Creates a criterion by using the `LIKE` operator without escape character processing.
* `Criteria` *not* `(Object o)`: Creates a criterion by using the `!=` operator.
* `Criteria` *notIn* `(Object... o)`: Creates a criterion by using the `NOT IN` operator for a varargs argument.
* `Criteria` *notIn* `(Collection<?> collection)`: Creates a criterion by using the `NOT IN` operator using a collection.
You can use `Criteria` with `SELECT`, `UPDATE`, and `DELETE` queries.
[[r2dbc.datbaseclient.fluent-api.select.methods]]
==== Methods for `SELECT` operations
The `select()` entry point exposes some additional methods that provide options for the query:
* *from* `(Class<T>)`: Specifies the source table by using a mapped object.
By default, it returns results as `T`.
* *from* `(String)`: Specifies the source table name.
By default, it returns results as `Map<String, Object>`.
* *as* `(Class<T>)`: Maps results to `T`.
* *map* `(BiFunction<Row, RowMetadata, T>)`: Supplies a mapping function to extract results.
* *project* `(String... columns)`: Specifies which columns to return.
* *matching* `(Criteria)`: Declares a `WHERE` condition to filter results.
* *orderBy* `(Order)`: Declares an `ORDER BY` clause to sort results.
* *page* `(Page pageable)`: Retrieves a particular page within the result.
It limits the size of the returned results and reads from an offset.
* *fetch* `()`: Transition call declaration to the fetch stage to declare result consumption multiplicity.
[[r2dbc.datbaseclient.fluent-api.insert]]
== Inserting Data
You can use the `insert()` entry point to insert data. Similar to `select()`, `insert()` allows free-form and mapped object inserts.
Consider the following simple typed insert operation:
====
[source,java]
----
Mono<Void> insert = databaseClient.insert()
.into(Person.class) <1>
.using(new Person(…)) <2>
.then(); <3>
----
<1> Using `Person` with the `into(…)` method sets the `INTO` table, based on mapping metadata.
It also prepares the insert statement to accept `Person` objects for inserting.
<2> Provide a scalar `Person` object.
Alternatively, you can supply a `Publisher` to run a stream of `INSERT` statements.
This method extracts all non-`null` values and inserts them.
<3> Use `then()` to insert an object without consuming further details.
Modifying statements allow consumption of the number of affected rows or tabular results for consuming generated keys.
====
Inserts also support untyped operations, as the following example shows:
====
[source,java]
----
Mono<Void> insert = databaseClient.insert()
.into("person") <1>
.value("firstname", "John") <2>
.nullValue("lastname") <3>
.then(); <4>
----
<1> Start an insert into the `person` table.
<2> Provide a non-null value for `firstname`.
<3> Set `lastname` to `null`.
<4> Use `then()` to insert an object without consuming further details.
Modifying statements allow consumption of the number of affected rows or tabular results for consuming generated keys.
====
[[r2dbc.datbaseclient.fluent-api.insert.methods]]
=== Methods for INSERT operations
The `insert()` entry point exposes the following additional methods to provide options for the operation:
* *into* `(Class<T>)`: Specifies the target table using a mapped object.
By default, it returns results as `T`.
* *into* `(String)`: Specifies the target table name.
By default, it returns results as `Map<String, Object>`.
* *using* `(T)`: Specifies the object to insert.
* *using* `(Publisher<T>)`: Accepts a stream of objects to insert.
* *table* `(String)`: Overrides the target table name.
* *value* `(String, Object)`: Provides a column value to insert.
* *nullValue* `(String)`: Provides a null value to insert.
* *map* `(BiFunction<Row, RowMetadata, T>)`: Supplies a mapping function to extract results.
* *then* `()`: Runs `INSERT` without consuming any results.
* *fetch* `()`: Transition call declaration to the fetch stage to declare result consumption multiplicity.
[[r2dbc.datbaseclient.fluent-api.update]]
== Updating Data
You can use the `update()` entry point to update rows.
Updating data starts by specifying the table to update by accepting `Update` specifying assignments.
It also accepts `Criteria` to create a `WHERE` clause.
Consider the following simple typed update operation:
====
[source,java]
----
Person modified = …
Mono<Void> update = databaseClient.update()
.table(Person.class) <1>
.using(modified) <2>
.then(); <3>
----
<1> Using `Person` with the `table(…)` method sets the table to update based on mapping metadata.
<2> Provide a scalar `Person` object value.
`using(…)` accepts the modified object and derives primary keys and updates all column values.
<3> Use `then()` to update the rows of an object without consuming further details.
Modifying statements also allow consumption of the number of affected rows.
====
Update also supports untyped operations, as the following example shows:
====
[source,java]
----
Mono<Void> update = databaseClient.update()
.table("person") <1>
.using(Update.update("firstname", "Jane")) <2>
.matching(where("firstname").is("John")) <3>
.then(); <4>
----
<1> Update the `person` table.
<2> Provide an `Update` definition of which columns to update.
<3> The issued query declares a `WHERE` condition on `firstname` columns to filter the rows to update.
<4> Use `then()` to update the rows of an object without consuming further details.
Modifying statements also allow consumption of the number of affected rows.
====
[[r2dbc.datbaseclient.fluent-api.update.methods]]
=== Methods for UPDATE operations
The `update()` entry point exposes the following additional methods to provide options for the operation:
* *table* `(Class<T>)`: Specifies the target table by using a mapped object.
Returns results by default as `T`.
* *table* `(String)`: Specifies the target table name.
By default, it returns results as `Map<String, Object>`.
* *using* `(T)`: Specifies the object to update.
It derives criteria itself.
* *using* `(Update)`: Specifies the update definition.
* *matching* `(Criteria)`: Declares a `WHERE` condition to indicate which rows to update.
* *then* `()`: Runs the `UPDATE` without consuming any results.
* *fetch* `()`: Transition call declaration to the fetch stage to fetch the number of updated rows.
[[r2dbc.datbaseclient.fluent-api.delete]]
== Deleting Data
You can use the `delete()` entry point to delete rows.
Removing data starts with a specification of the table to delete from and, optionally, accepts a `Criteria` to create a `WHERE` clause.
Consider the following simple delete operation:
====
[source,java]
----
Mono<Void> delete = databaseClient.delete()
.from(Person.class) <1>
.matching(where("firstname").is("John") <2>
.and("lastname").in("Doe", "White"))
.then(); <3>
----
<1> Using `Person` with the `from(…)` method sets the `FROM` table, based on mapping metadata.
<2> The issued query declares a `WHERE` condition on `firstname` and `lastname` columns to filter rows to delete.
<3> Use `then()` to delete rows from an object without consuming further details.
Modifying statements also allow consumption of the number of affected rows.
====
[[r2dbc.datbaseclient.fluent-api.delete.methods]]
=== Methods for DELETE operations
The `delete()` entry point exposes the following additional methods to provide options for the operation:
* *from* `(Class<T>)`: Specifies the target table by using a mapped object.
By default, it returns results as `T`.
* *from* `(String)`: Specifies the target table name. By default, it returns results as `Map<String, Object>`.
* *matching* `(Criteria)`: Declares a `WHERE` condition to define the rows to delete.
* *then* `()`: Runs the `DELETE` without consuming any results.
* *fetch* `()`: Transition call declaration to the fetch stage to fetch the number of deleted rows.

102
src/main/asciidoc/reference/r2dbc-initialization.adoc

@ -1,102 +0,0 @@ @@ -1,102 +0,0 @@
[[r2dbc.init]]
= Initializing a `ConnectionFactory`
The `org.springframework.data.r2dbc.connectionfactory.init` package provides support for initializing an existing `ConnectionFactory`.
You may sometimes need to initialize an instance that runs on a server somewhere or an embedded database.
== Initializing a Database by Using @Bean methods
If you want to initialize a database and you can provide a reference to a `ConnectionFactory` bean, you can use the `ConnectionFactoryInitializer` class, as the following example shows:
.Using `ConnectionFactoryInitializer` to initialize a `ConnectionFactory`
====
[source,java]
----
@Configuration
public class InitializerConfiguration {
@Bean
public ConnectionFactoryInitializer initializer(ConnectionFactory connectionFactory) {
ConnectionFactoryInitializer initializer = new ConnectionFactoryInitializer();
initializer.setConnectionFactory(connectionFactory);
CompositeDatabasePopulator populator = new CompositeDatabasePopulator();
populator.addPopulators(new ResourceDatabasePopulator(new ClassPathResource("com/foo/sql/db-schema.sql")));
populator.addPopulators(new ResourceDatabasePopulator(new ClassPathResource("com/foo/sql/test-data1.sql")));
initializer.setDatabasePopulator(populator);
return initializer;
}
}
----
====
The preceding example runs the two specified scripts against the database.
The first script creates a schema, and the second populates tables with a test data set.
The default behavior of the database initializer is to unconditionally run the provided scripts.
This may not always be what you want — for instance, if you run the scripts against a database that already has test data in it.
The likelihood of accidentally deleting data is reduced by following the common pattern (shown earlier) of creating the tables first and then inserting the data.
The first step fails if the tables already exist.
However, to gain more control over the creation and deletion of existing data, `ConnectionFactoryInitializer` and `ResourceDatabasePopulator` support various switches such as switching the initialization on and off.
Each statement should be separated by `;` or a new line if the `;` character is not present at all in the script. You can control that globally or script by script, as the following example shows:
.Customizing statement separators
====
[source,java]
----
@Configuration
public class InitializerConfiguration {
@Bean
public ConnectionFactoryInitializer initializer(ConnectionFactory connectionFactory) {
ConnectionFactoryInitializer initializer = new ConnectionFactoryInitializer();
initializer.setConnectionFactory(connectionFactory);
ResourceDatabasePopulator populator = new ResourceDatabasePopulator(new ClassPathResource("com/foo/sql/db-schema.sql"));
populator.setSeparator("@@"); <1>
initializer.setDatabasePopulator(populator);
return initializer;
}
}
----
<1> Set the statement separator to `@@`.
====
In this example, the schema script uses `@@` as the statement separator.
=== Initialization of Other Components that Depend on the Database
A large class of applications (those that do not use the database until after the Spring context has started) can use the database initializer with no further complications.
If your application is not one of those, you might need to read the rest of this section.
The database initializer depends on a `ConnectionFactory` instance and runs the scripts provided in its initialization callback (analogous to an `init-method` in an XML bean definition, a `@PostConstruct` method in a component, or the `afterPropertiesSet()` method in a component that implements `InitializingBean`).
If other beans depend on the same data source and use the data source in an initialization callback, there might be a problem because the data has not yet been initialized.
A common example of this is a cache that initializes eagerly and loads data from the database on application startup.
To get around this issue, you have two options:
1. change your cache initialization strategy to a later phase or
2. ensure that the database initializer is initialized first
Changing your cache initialization strategy might be easy if the application is in your control, but otherwise not. Some suggestions for how to implement this include:
* Make the cache initialize lazily on first usage, which improves application startup time.
* Have your cache or a separate component that initializes the cache implement Lifecycle or SmartLifecycle.
When the application context starts, you can automatically start a `SmartLifecycle` by setting its `autoStartup` flag, and you can manually start a Lifecycle by calling `ConfigurableApplicationContext.start()` on the enclosing context.
* Use a Spring `ApplicationEvent` or similar custom observer mechanism to trigger the cache initialization.
`ContextRefreshedEvent` is always published by the context when it is ready for use (after all beans have been initialized), so that is often a useful hook (this is how the `SmartLifecycle` works by default).
Ensuring that the database initializer is initialized first can also be easy.
Some suggestions on how to implement this include:
* Rely on the default behavior of the Spring `BeanFactory`, which is that beans are initialized in registration order.
You can easily arrange that by adopting the common practice of a set of `@Import` configuration that order your application modules and ensuring that the database and database initialization are listed first.
* Separate the `ConnectionFactory` and the business components that use it and control their startup order by putting them in separate `ApplicationContext` instances (for example, the parent context contains the `ConnectionFactory`, and the child context contains the business components).

63
src/main/asciidoc/reference/r2dbc-repositories.adoc

@ -33,7 +33,7 @@ The following example shows a repository interface for the preceding `Person` cl @@ -33,7 +33,7 @@ The following example shows a repository interface for the preceding `Person` cl
.Basic repository interface to persist Person entities
====
[source]
[source,java]
----
public interface PersonRepository extends ReactiveCrudRepository<Person, Long> {
@ -56,7 +56,7 @@ class ApplicationConfig extends AbstractR2dbcConfiguration { @@ -56,7 +56,7 @@ class ApplicationConfig extends AbstractR2dbcConfiguration {
@Override
public ConnectionFactory connectionFactory() {
return …;
return …
}
}
----
@ -69,32 +69,9 @@ Consequently, you can retrieve all `Person` objects with the following code: @@ -69,32 +69,9 @@ Consequently, you can retrieve all `Person` objects with the following code:
.Paging access to Person entities
====
[source,java]
[source,java,indent=0]
----
@RunWith(SpringRunner.class)
@ContextConfiguration
public class PersonRepositoryTests {
@Autowired PersonRepository repository;
@Test
public void readsAllEntitiesCorrectly() {
repository.findAll()
.as(StepVerifier::create)
.expectNextCount(1)
.verifyComplete();
}
@Test
public void readsEntitiesByNameCorrectly() {
repository.findByFirstname("Hello World")
.as(StepVerifier::create)
.expectNextCount(1)
.verifyComplete();
}
}
include::../{example-root}/PersonRepositoryTests.java[tags=class]
----
====
@ -135,7 +112,7 @@ interface ReactivePersonRepository extends ReactiveSortingRepository<Person, Lon @@ -135,7 +112,7 @@ interface ReactivePersonRepository extends ReactiveSortingRepository<Person, Lon
<2> The method shows a query for all people with the given `firstname` once the `firstname` is emitted by the given `Publisher`.
<3> Use `Pageable` to pass offset and sorting parameters to the database.
<4> Find a single entity for the given criteria. It completes with `IncorrectResultSizeDataAccessException` on non-unique results.
<5> Unless <4>, the first entity is always emitted even if the query yields more result documents.
<5> Unlike <4>, the first entity is always emitted even if the query yields more result rows.
<6> The `findByLastname` method shows a query for all people with the given last name.
<7> A query for a single `Person` entity projecting only `firstname` and `lastname` columns.
The annotated query uses native bind markers, which are Postgres bind markers in this example.
@ -172,7 +149,7 @@ The following table shows the keywords that are supported for query methods: @@ -172,7 +149,7 @@ The following table shows the keywords that are supported for query methods:
| `LessThanEqual`
| `findByAgeLessThanEqual(int age)`
| `age <= age`
| `age \<= age`
| `Between`
| `findByAgeBetween(int from, int to)`
@ -258,11 +235,9 @@ interface ReactivePersonRepository extends ReactiveSortingRepository<Person, Str @@ -258,11 +235,9 @@ interface ReactivePersonRepository extends ReactiveSortingRepository<Person, Str
As this approach is feasible for comprehensive custom functionality, you can modify queries that only need parameter binding by annotating the query method with `@Modifying`, as shown in the following example:
====
[source,java]
[source,java,indent=0]
----
@Modifying
@Query("UPDATE person SET firstname = :firstname where lastname = :lastname")
Mono<Integer> setFixedFirstnameFor(String firstname, String lastname);
include::../{example-root}/PersonRepository.java[tags=atModifying]
----
====
@ -287,14 +262,12 @@ Expressions expose method arguments through an array that contains all the argum @@ -287,14 +262,12 @@ Expressions expose method arguments through an array that contains all the argum
The following query uses `[0]`
to declare the predicate value for `lastname` (which is equivalent to the `:lastname` parameter binding):
[source,java]
====
[source,java,indent=0]
----
public interface PersonRepository extends ReactiveCrudRepository<Person, String> {
@Query("SELECT * FROM person WHERE lastname = :#{[0]}")
List<Person> findByQueryWithExpression(String lastname);
}
include::../{example-root}/PersonRepository.java[tags=spel]
----
====
SpEL in query strings can be a powerful way to enhance queries.
However, they can also accept a broad range of unwanted arguments.
@ -342,8 +315,8 @@ With auto-increment columns, this happens automatically, because the ID gets set @@ -342,8 +315,8 @@ With auto-increment columns, this happens automatically, because the ID gets set
[[r2dbc.optimistic-locking]]
=== Optimistic Locking
The `@Version` annotation provides syntax similar to that of JPA in the context of R2DBC and makes sure updates are only applied to documents with a matching version.
Therefore, the actual value of the version property is added to the update query in such a way that the update does not have any effect if another operation altered the document in the meantime.
The `@Version` annotation provides syntax similar to that of JPA in the context of R2DBC and makes sure updates are only applied to rows with a matching version.
Therefore, the actual value of the version property is added to the update query in such a way that the update does not have any effect if another operation altered the row in the meantime.
In that case, an `OptimisticLockingFailureException` is thrown.
The following example shows these features:
@ -364,8 +337,8 @@ R2dbcEntityTemplate template = …; @@ -364,8 +337,8 @@ R2dbcEntityTemplate template = …;
Mono<Person> daenerys = template.insert(new Person("Daenerys")); <1>
Person other = template.select(Person.class)
.matching(query(where("id").is(daenerys.getId())))
.first().block(); <2>
.matching(query(where("id").is(daenerys.getId())))
.first().block(); <2>
daenerys.setLastname("Targaryen");
template.update(daenerys); <3>
@ -375,7 +348,7 @@ template.update(other).subscribe(); // emits OptimisticLockingFailureException @@ -375,7 +348,7 @@ template.update(other).subscribe(); // emits OptimisticLockingFailureException
<1> Initially insert row. `version` is set to `0`.
<2> Load the just inserted row. `version` is still `0`.
<3> Update the row with `version = 0`. Set the `lastname` and bump `version` to `1`.
<4> Try to update the previously loaded document that still has `version = 0`.The operation fails with an `OptimisticLockingFailureException`, as the current `version` is `1`.
<4> Try to update the previously loaded row that still has `version = 0`. The operation fails with an `OptimisticLockingFailureException`, as the current `version` is `1`.
====
:projection-collection: Flux
@ -403,7 +376,7 @@ static class MySQLConfiguration { @@ -403,7 +376,7 @@ static class MySQLConfiguration {
@Bean
@Qualifier("mysql")
public ConnectionFactory mysqlConnectionFactory() {
return …;
return …
}
@Bean

208
src/main/asciidoc/reference/r2dbc-sql.adoc

@ -1,208 +0,0 @@ @@ -1,208 +0,0 @@
[[r2dbc.datbaseclient.statements]]
= Executing Statements
`DatabaseClient` provides the basic functionality of running a statement.
The following example shows what you need to include for minimal but fully functional code that creates a new table:
====
[source,java]
----
Mono<Void> completion = client.execute("CREATE TABLE person (id VARCHAR(255) PRIMARY KEY, name VARCHAR(255), age INTEGER);")
.then();
----
====
`DatabaseClient` is designed for convenient, fluent usage.
It exposes intermediate, continuation, and terminal methods at each stage of the execution specification.
The preceding example uses `then()` to return a completion `Publisher` that completes as soon as the query (or queries, if the SQL query contains multiple statements) completes.
NOTE: `execute(…)` accepts either the SQL query string or a query `Supplier<String>` to defer the actual query creation until the query is sent to the database.
[[r2dbc.datbaseclient.queries]]
== Running Queries
SQL queries can return values or the number of affected rows.
`DatabaseClient` can return the number of updated rows or the rows themselves, depending on the issued query.
The following example shows an `UPDATE` statement that returns the number of updated rows:
====
[source,java]
----
Mono<Integer> affectedRows = client.execute("UPDATE person SET name = 'Joe'")
.fetch().rowsUpdated();
----
====
Running a `SELECT` query returns a different type of result, in particular tabular results.
Tabular data is typically consumed by streaming each `Row`.
You might have noticed the use of `fetch()` in the previous example.
`fetch()` is a continuation operator that lets you specify how much data you want to consume.
====
[source,java]
----
Mono<Map<String, Object>> first = client.execute("SELECT id, name FROM person")
.fetch().first();
----
====
Calling `first()` returns the first row from the result and discards remaining rows.
You can consume data with the following operators:
* `first()` returns the first row of the entire result.
* `one()` returns exactly one result and fails if the result contains more rows.
* `all()` returns all rows of the result.
* `rowsUpdated()` returns the number of affected rows (`INSERT` count, `UPDATE` count).
By default, `DatabaseClient` queries return their results as `Map` of column name to value.
You can customize type mapping by applying an `as(Class<T>)` operator, as follows:
====
[source,java]
----
Flux<Person> all = client.execute("SELECT id, name FROM mytable")
.as(Person.class)
.fetch().all();
----
====
`as(…)` applies <<mapping.conventions,Convention-based Object Mapping>> and maps the resulting columns to your POJO.
[[r2dbc.datbaseclient.mapping]]
== Mapping Results
You can customize result extraction beyond `Map` and POJO result extraction by providing an extractor `BiFunction<Row, RowMetadata, T>`.
The extractor function interacts directly with R2DBC's `Row` and `RowMetadata` objects and can return arbitrary values (singular values, collections and maps, and objects).
The following example extracts the `id` column and emits its value:
====
[source,java]
----
Flux<String> names = client.execute("SELECT name FROM person")
.map((row, rowMetadata) -> row.get("id", String.class))
.all();
----
====
[[r2dbc.datbaseclient.mapping.null]]
.What about `null`?
****
Relational database results can contain `null` values.
The Reactive Streams specification forbids the emission of `null` values.
That requirement mandates proper `null` handling in the extractor function.
While you can obtain `null` values from a `Row`, you must not emit a `null` value.
You must wrap any `null` values in an object (for example, `Optional` for singular values) to make sure a `null` value is never returned directly by your extractor function.
****
[[r2dbc.datbaseclient.binding]]
== Binding Values to Queries
A typical application requires parameterized SQL statements to select or update rows according to some input.
These are typically `SELECT` statements constrained by a `WHERE` clause or `INSERT` and `UPDATE` statements that accept input parameters.
Parameterized statements bear the risk of SQL injection if parameters are not escaped properly.
`DatabaseClient` leverages R2DBC's `bind` API to eliminate the risk of SQL injection for query parameters.
You can provide a parameterized SQL statement with the `execute(…)` operator and bind parameters to the actual `Statement`.
Your R2DBC driver then runs the statement by using prepared statements and parameter substitution.
Parameter binding supports two binding strategies:
* By Index, using zero-based parameter indexes.
* By Name, using the placeholder name.
The following example shows parameter binding for a query:
====
[source,java]
----
db.execute("INSERT INTO person (id, name, age) VALUES(:id, :name, :age)")
.bind("id", "joe")
.bind("name", "Joe")
.bind("age", 34);
----
====
.R2DBC Native Bind Markers
****
R2DBC uses database-native bind markers that depend on the actual database vendor.
As an example, Postgres uses indexed markers, such as `$1`, `$2`, `$n`.
Another example is SQL Server, which uses named bind markers prefixed with `@`.
This is different from JDBC, which requires `?` as bind markers.
In JDBC, the actual drivers translate `?` bind markers to database-native markers as part of processing the statement.
Spring Data R2DBC lets you use native bind markers or named bind markers with the `:name` syntax.
Named parameter support uses a `R2dbcDialect` instance to expand named parameters to native bind markers at the time of running the query, which gives you a certain degree of query portability across various database vendors.
****
The query-preprocessor unrolls named `Collection` parameters into a series of bind markers to remove the need of dynamic query creation based on the number of arguments.
Nested object arrays are expanded to allow usage of (for example) select lists.
Consider the following query:
====
[source,sql]
----
SELECT id, name, state FROM table WHERE (name, age) IN (('John', 35), ('Ann', 50))
----
====
The preceding query can be parametrized and run as follows:
====
[source,java]
----
List<Object[]> tuples = new ArrayList<>();
tuples.add(new Object[] {"John", 35});
tuples.add(new Object[] {"Ann", 50});
db.execute("SELECT id, name, state FROM table WHERE (name, age) IN (:tuples)")
.bind("tuples", tuples)
----
====
NOTE: Usage of select lists is vendor-dependent.
The following example shows a simpler variant using `IN` predicates:
====
[source,java]
----
db.execute("SELECT id, name, state FROM table WHERE age IN (:ages)")
.bind("ages", Arrays.asList(35, 50))
----
====
[[r2dbc.datbaseclient.filter]]
== Statement Filters
You can register a `Statement` filter (`StatementFilterFunction`) through `DatabaseClient` to intercept and modify statements when they run, as the following example shows:
====
[source,java]
----
db.execute("INSERT INTO table (name, state) VALUES(:name, :state)")
.filter((s, next) -> next.execute(s.returnGeneratedValues("id")))
.bind("name", …)
.bind("state", …)
----
====
`DatabaseClient` also exposes a simplified `filter(…)` overload accepting a `UnaryOperator<Statement>`:
====
[source,java]
----
db.execute("INSERT INTO table (name, state) VALUES(:name, :state)")
.filter(s -> s.returnGeneratedValues("id"))
.bind("name", …)
.bind("state", …)
db.execute("SELECT id, name, state FROM table")
.filter(s -> s.fetchSize(25))
----
====
`StatementFilterFunction` allows filtering of the `Statement` and filtering of the `Result` objects.

194
src/main/asciidoc/reference/r2dbc-template.adoc

@ -0,0 +1,194 @@ @@ -0,0 +1,194 @@
[[r2dbc.datbaseclient.fluent-api]]
[[r2dbc.entityoperations]]
= R2dbcEntityOperations Data Access API
`R2dbcEntityTemplate` is the central entrypoint for Spring Data R2DBC.
It provides direct entity-oriented methods and a more narrow, fluent interface for typical ad-hoc use-cases, such as querying, inserting, updating, and deleting data.
The entry points (`insert()`, `select()`, `update()`, and others) follow a natural naming schema based on the operation to be run.
Moving on from the entry point, the API is designed to offer only context-dependent methods that lead to a terminating method that creates and runs a SQL statement.
Spring Data R2DBC uses a `R2dbcDialect` abstraction to determine bind markers, pagination support and the data types natively supported by the underlying driver.
NOTE: All terminal methods always return a `Publisher` type that represents the desired operation.
The actual statements are sent to the database upon subscription.
[[r2dbc.entityoperations.save-insert]]
== Methods for Inserting and Updating Entities
There are several convenient methods on `R2dbcEntityTemplate` for saving and inserting your objects.
To have more fine-grained control over the conversion process, you can register Spring converters with `R2dbcCustomConversions` -- for example `Converter<Person, OutboundRow>` and `Converter<Row, Person>`.
The simple case of using the save operation is to save a POJO. In this case, the table name is determined by the (not fully qualified) name of the class.
You may also call the save operation with a specific collection name.
You can use mapping metadata to override the collection in which to store the object.
When inserting or saving, if the `Id` property is not set, the assumption is that its value will be auto-generated by the database.
Consequently, for auto-generation the type of the `Id` property or field in your class must be a `Long`, or `Integer`.
The following example shows how to insert a row and retrieve its contents:
.Inserting and retrieving entities using the `R2dbcEntityTemplate`
====
[source,java,indent=0]
----
include::../{example-root}/R2dbcEntityTemplateSnippets.java[tags=insertAndSelect]
----
====
The following insert and update operations are available:
* `Mono<T>` *insert* `(T objectToSave)`: Insert the object to the default table.
* `Mono<T>` *update* `(T objectToSave)`: Update the object in the default table.
Table names can be customized by using the fluent API.
[[r2dbc.entityoperations.selecting]]
== Selecting Data
The `select(…)` and `selectOne(…)` methods on `R2dbcEntityTemplate` are used to select data from a table.
Both methods take a <<r2dbc.datbaseclient.fluent-api.criteria,`Query`>> object that defines the field projection, the `WHERE` clause, the `ORDER BY` clause and limit/offset pagination.
Limit/offset functionality is transparent to the application regardless of the underlying database.
This functionality is supported by the <<r2dbc.drivers,`R2dbcDialect` abstraction>> to cater for differences between the individual SQL flavors.
.Selecting entities using the `R2dbcEntityTemplate`
====
[source,java,indent=0]
----
include::../{example-root}/R2dbcEntityTemplateSnippets.java[tags=select]
----
====
[[r2dbc.entityoperations.fluent-api]]
== Fluent API
This section explains the fluent API usage.
Consider the following simple query:
====
[source,java,indent=0]
----
include::../{example-root}/R2dbcEntityTemplateSnippets.java[tags=simpleSelect]
----
<1> Using `Person` with the `from(…)` method sets the `FROM` table based on mapping metadata.
It also maps tabular results on `Person` result objects.
<2> Fetching `all()` rows returns a `Flux<Person>` without limiting results.
====
The following example declares a more complex query that specifies the table name by name, a `WHERE` condition, and an `ORDER BY` clause:
====
[source,java,indent=0]
----
include::../{example-root}/R2dbcEntityTemplateSnippets.java[tags=fullSelect]
----
<1> Selecting from a table by name returns row results using the given domain type.
<2> The issued query declares a `WHERE` condition on `firstname` and `lastname` columns to filter results.
<3> Results can be ordered by individual column names, resulting in an `ORDER BY` clause.
<4> Selecting the one result fetches only a single row.
This way of consuming rows expects the query to return exactly a single result.
`Mono` emits an `IncorrectResultSizeDataAccessException` if the query yields more than a single result.
====
TIP: You can directly apply <<projections,Projections>> to results by providing the target type via `select(Class<?>)`.
You can switch between retrieving a single entity and retrieving multiple entities through the following terminating methods:
* `first()`: Consume only the first row, returning a `Mono`.
The returned `Mono` completes without emitting an object if the query returns no results.
* `one()`: Consume exactly one row, returning a `Mono`.
The returned `Mono` completes without emitting an object if the query returns no results.
If the query returns more than one row, `Mono` completes exceptionally emitting `IncorrectResultSizeDataAccessException`.
* `all()`: Consume all returned rows returning a `Flux`.
* `count()`: Apply a count projection returning `Mono<Long>`.
* `exists()`: Return whether the query yields any rows by returning `Mono<Boolean>`.
You can use the `select()` entry point to express your `SELECT` queries.
The resulting `SELECT` queries support the commonly used clauses (`WHERE` and `ORDER BY`) and support pagination.
The fluent API style lets you chain together multiple methods while having easy-to-understand code.
To improve readability, you can use static imports that let you avoid using the 'new' keyword for creating `Criteria` instances.
[[r2dbc.datbaseclient.fluent-api.criteria]]
=== Methods for the Criteria Class
The `Criteria` class provides the following methods, all of which correspond to SQL operators:
* `Criteria` *and* `(String column)`: Adds a chained `Criteria` with the specified `property` to the current `Criteria` and returns the newly created one.
* `Criteria` *or* `(String column)`: Adds a chained `Criteria` with the specified `property` to the current `Criteria` and returns the newly created one.
* `Criteria` *greaterThan* `(Object o)`: Creates a criterion by using the `>` operator.
* `Criteria` *greaterThanOrEquals* `(Object o)`: Creates a criterion by using the `>=` operator.
* `Criteria` *in* `(Object... o)`: Creates a criterion by using the `IN` operator for a varargs argument.
* `Criteria` *in* `(Collection<?> collection)`: Creates a criterion by using the `IN` operator using a collection.
* `Criteria` *is* `(Object o)`: Creates a criterion by using column matching (`property = value`).
* `Criteria` *isNull* `()`: Creates a criterion by using the `IS NULL` operator.
* `Criteria` *isNotNull* `()`: Creates a criterion by using the `IS NOT NULL` operator.
* `Criteria` *lessThan* `(Object o)`: Creates a criterion by using the `<` operator.
* `Criteria` *lessThanOrEquals* `(Object o)`: Creates a criterion by using the `<=` operator.
* `Criteria` *like* `(Object o)`: Creates a criterion by using the `LIKE` operator without escape character processing.
* `Criteria` *not* `(Object o)`: Creates a criterion by using the `!=` operator.
* `Criteria` *notIn* `(Object... o)`: Creates a criterion by using the `NOT IN` operator for a varargs argument.
* `Criteria` *notIn* `(Collection<?> collection)`: Creates a criterion by using the `NOT IN` operator using a collection.
You can use `Criteria` with `SELECT`, `UPDATE`, and `DELETE` queries.
[[r2dbc.entityoperations.fluent-api.insert]]
== Inserting Data
You can use the `insert()` entry point to insert data.
Consider the following simple typed insert operation:
====
[source,java,indent=0]
----
include::../{example-root}/R2dbcEntityTemplateSnippets.java[tags=insert]
----
<1> Using `Person` with the `into(…)` method sets the `INTO` table, based on mapping metadata.
It also prepares the insert statement to accept `Person` objects for inserting.
<2> Provide a scalar `Person` object.
Alternatively, you can supply a `Publisher` to run a stream of `INSERT` statements.
This method extracts all non-`null` values and inserts them.
====
[[r2dbc.entityoperations.fluent-api.update]]
== Updating Data
You can use the `update()` entry point to update rows.
Updating data starts by specifying the table to update by accepting `Update` specifying assignments.
It also accepts `Query` to create a `WHERE` clause.
Consider the following simple typed update operation:
====
[source,java]
----
Person modified = …
include::../{example-root}/R2dbcEntityTemplateSnippets.java[tags=update]
----
<1> Update `Person` objects and apply mapping based on mapping metadata.
<2> Set a different table name by calling the `inTable(…)` method.
<2> Specify a query that translates into a `WHERE` clause.
<3> Apply the `Update` object.
Set in this case `age` to `42` and return the number of affected rows.
====
[[r2dbc.entityoperations.fluent-api.delete]]
== Deleting Data
You can use the `delete()` entry point to delete rows.
Removing data starts with a specification of the table to delete from and, optionally, accepts a `Criteria` to create a `WHERE` clause.
Consider the following simple delete operation:
====
[source,java]
----
include::../{example-root}/R2dbcEntityTemplateSnippets.java[tags=delete]
----
<1> Delete `Person` objects and apply mapping based on mapping metadata.
<2> Set a different table name by calling the `from(…)` method.
<2> Specify a query that translates into a `WHERE` clause.
<3> Apply the delete operation and return the number of affected rows.
====

86
src/main/asciidoc/reference/r2dbc-transactions.adoc

@ -1,86 +0,0 @@ @@ -1,86 +0,0 @@
[[r2dbc.datbaseclient.transactions]]
= Transactions
A common pattern when using relational databases is grouping multiple queries within a unit of work that is guarded by a transaction.
Relational databases typically associate a transaction with a single transport connection.
Consequently, using different connections results in using different transactions.
Spring Data R2DBC includes transaction-awareness in `DatabaseClient` that lets you group multiple statements within the same transaction by using {spring-framework-ref}/data-access.html#transaction[Spring's Transaction Management].
Spring Data R2DBC provides an implementation for `ReactiveTransactionManager` with `R2dbcTransactionManager`.
The following example shows how to programmatically manage a transaction:
.Programmatic Transaction Management
====
[source,java]
----
ReactiveTransactionManager tm = new R2dbcTransactionManager(connectionFactory);
TransactionalOperator operator = TransactionalOperator.create(tm); <1>
DatabaseClient client = DatabaseClient.create(connectionFactory);
Mono<Void> atomicOperation = client.execute("INSERT INTO person (id, name, age) VALUES(:id, :name, :age)")
.bind("id", "joe")
.bind("name", "Joe")
.bind("age", 34)
.fetch().rowsUpdated()
.then(client.execute("INSERT INTO contacts (id, name) VALUES(:id, :name)")
.bind("id", "joe")
.bind("name", "Joe")
.fetch().rowsUpdated())
.then()
  .as(operator::transactional); <2>
----
<1> Associate the `TransactionalOperator` with the `ReactiveTransactionManager`.
<2> Bind the operation to the `TransactionalOperator`.
====
{spring-framework-ref}/data-access.html#transaction-declarative[Spring's declarative Transaction Management] is a less invasive, annotation-based approach to transaction demarcation, as the following example shows:
.Declarative Transaction Management
====
[source,java]
----
@Configuration
@EnableTransactionManagement <1>
class Config extends AbstractR2dbcConfiguration {
@Override
public ConnectionFactory connectionFactory() {
return // ...
}
@Bean
ReactiveTransactionManager transactionManager(ConnectionFactory connectionFactory) { <2>
return new R2dbcTransactionManager(connectionFactory);
}
}
@Service
class MyService {
private final DatabaseClient client;
MyService(DatabaseClient client) {
this.client = client;
}
@Transactional
public Mono<Void> insertPerson() {
return client.execute("INSERT INTO person (id, name, age) VALUES(:id, :name, :age)")
.bind("id", "joe")
.bind("name", "Joe")
.bind("age", 34)
.fetch().rowsUpdated()
.then(client.execute("INSERT INTO contacts (id, name) VALUES(:id, :name)")
.bind("id", "joe")
.bind("name", "Joe")
.fetch().rowsUpdated())
.then();
}
}
----
<1> Enable declarative transaction management.
<2> Provide a `ReactiveTransactionManager` implementation to back reactive transaction features.
====

8
src/main/asciidoc/reference/r2dbc.adoc

@ -3,10 +3,4 @@ @@ -3,10 +3,4 @@
include::r2dbc-core.adoc[]
include::r2dbc-databaseclient.adoc[leveloffset=+1]
include::r2dbc-sql.adoc[leveloffset=+1]
include::r2dbc-fluent.adoc[leveloffset=+1]
include::r2dbc-transactions.adoc[leveloffset=+1]
include::r2dbc-template.adoc[leveloffset=+1]

48
src/test/java/org/springframework/data/r2dbc/documentation/Person.java

@ -0,0 +1,48 @@ @@ -0,0 +1,48 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.r2dbc.documentation;
// tag::class[]
/**
 * Immutable value object representing a person, used by the reference
 * documentation samples. Carries an identifier, a display name, and an age.
 */
public class Person {

	private final String id;
	private final String name;
	private final int age;

	/**
	 * Creates a new {@code Person}.
	 *
	 * @param id the person's identifier
	 * @param name the person's name
	 * @param age the person's age
	 */
	public Person(String id, String name, int age) {
		this.id = id;
		this.name = name;
		this.age = age;
	}

	public String getId() {
		return id;
	}

	public String getName() {
		return name;
	}

	public int getAge() {
		return age;
	}

	@Override
	public String toString() {
		// Same rendering as the original concatenation-based implementation.
		return String.format("Person [id=%s, name=%s, age=%d]", id, name, age);
	}
}
// end::class[]

39
src/test/java/org/springframework/data/r2dbc/documentation/PersonRepository.java

@ -0,0 +1,39 @@ @@ -0,0 +1,39 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.r2dbc.documentation;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import org.springframework.data.r2dbc.repository.Modifying;
import org.springframework.data.r2dbc.repository.Query;
import org.springframework.data.repository.reactive.ReactiveCrudRepository;
/**
 * Reactive repository for {@code Person} entities, referenced by the
 * documentation snippets (see the {@code tag::...[]} regions below, which are
 * included verbatim into the AsciiDoc reference docs).
 */
public interface PersonRepository extends ReactiveCrudRepository<Person, String> {

	/**
	 * Derived query emitting all persons with the given first name.
	 *
	 * NOTE(review): the queries in this interface reference {@code firstname}/{@code lastname}
	 * columns, while the {@code Person} sample class exposes id/name/age — confirm the
	 * intended schema for the documentation examples.
	 */
	Flux<Person> findByFirstname(String firstname);

	// tag::atModifying[]
	@Modifying
	@Query("UPDATE person SET firstname = :firstname where lastname = :lastname")
	Mono<Integer> setFixedFirstnameFor(String firstname, String lastname);
	// end::atModifying[]

	// tag::spel[]
	@Query("SELECT * FROM person WHERE lastname = :#{[0]}")
	Flux<Person> findByQueryWithExpression(String lastname);
	// end::spel[]
}

53
src/test/java/org/springframework/data/r2dbc/documentation/PersonRepositoryTests.java

@ -0,0 +1,53 @@ @@ -0,0 +1,53 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.r2dbc.documentation;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import reactor.test.StepVerifier;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
//@formatter:off
// tag::class[]
@ExtendWith(SpringExtension.class)
@ContextConfiguration
class PersonRepositoryTests {

	@Autowired
	PersonRepository repository;

	@Test
	void readsAllEntitiesCorrectly() {

		// expects exactly one person to be present in the database
		repository.findAll()
			.as(StepVerifier::create)
			.expectNextCount(1)
			.verifyComplete();
	}

	@Test
	void readsEntitiesByNameCorrectly() {

		// expects exactly one person named "Hello World"
		repository.findByFirstname("Hello World")
			.as(StepVerifier::create)
			.expectNextCount(1)
			.verifyComplete();
	}
}
// end::class[]

60
src/test/java/org/springframework/data/r2dbc/documentation/R2dbcApp.java

@ -0,0 +1,60 @@ @@ -0,0 +1,60 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.r2dbc.documentation;
// tag::class[]
import io.r2dbc.spi.ConnectionFactories;
import io.r2dbc.spi.ConnectionFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import reactor.test.StepVerifier;
import org.springframework.data.r2dbc.core.R2dbcEntityTemplate;
/**
 * Getting-started example: creates a schema, inserts a row, and reads it back
 * using {@code R2dbcEntityTemplate}. This class is included into the reference
 * documentation via the surrounding {@code tag::class[]} region.
 */
public class R2dbcApp {

	private static final Log log = LogFactory.getLog(R2dbcApp.class);

	public static void main(String[] args) {

		// In-memory H2 database that stays open between connections.
		ConnectionFactory connectionFactory = ConnectionFactories.get("r2dbc:h2:mem:///test?options=DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE");

		R2dbcEntityTemplate template = new R2dbcEntityTemplate(connectionFactory);

		// Create the person table.
		template.getDatabaseClient().sql("CREATE TABLE person" +
				"(id VARCHAR(255) PRIMARY KEY," +
				"name VARCHAR(255)," +
				"age INT)")
			.fetch()
			.rowsUpdated()
			.as(StepVerifier::create)
			.expectNextCount(1)
			.verifyComplete();

		// Insert a single person.
		template.insert(Person.class)
			.using(new Person("joe", "Joe", 34))
			.as(StepVerifier::create)
			.expectNextCount(1)
			.verifyComplete();

		// Read the first person back and log it.
		template.select(Person.class)
			.first()
			.doOnNext(it -> log.info(it))
			.as(StepVerifier::create)
			.expectNextCount(1)
			.verifyComplete();
	}
}
// end::class[]

109
src/test/java/org/springframework/data/r2dbc/documentation/R2dbcEntityTemplateSnippets.java

@ -0,0 +1,109 @@ @@ -0,0 +1,109 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.r2dbc.documentation;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import org.springframework.data.r2dbc.core.R2dbcEntityTemplate;
import static org.springframework.data.domain.Sort.*;
import static org.springframework.data.domain.Sort.Order.*;
import static org.springframework.data.relational.core.query.Criteria.*;
import static org.springframework.data.relational.core.query.Query.*;
import static org.springframework.data.relational.core.query.Update.*;
/**
* @author Mark Paluch
*/
//@formatter:off
/**
 * Code snippets for the {@code R2dbcEntityTemplate} reference documentation.
 * The {@code tag::...[]} regions are included verbatim into the AsciiDoc docs,
 * so their content must compile and read well on its own.
 */
class R2dbcEntityTemplateSnippets {

	/** Insert an entity and read it back by a criteria query. */
	void saveAndSelect(R2dbcEntityTemplate template) {

		// tag::insertAndSelect[]
		Person person = new Person("John", "Doe");

		Mono<Person> saved = template.insert(person);
		Mono<Person> loaded = template.selectOne(query(where("firstname").is("John")),
				Person.class);
		// end::insertAndSelect[]
	}

	/** Select multiple rows matching a criteria query. */
	void select(R2dbcEntityTemplate template) {

		// tag::select[]
		Flux<Person> loaded = template.select(query(where("firstname").is("John")),
				Person.class);
		// end::select[]
	}

	/** Fluent select of all rows of a table. */
	void simpleSelect(R2dbcEntityTemplate template) {

		// tag::simpleSelect[]
		Flux<Person> people = template.select(Person.class) // <1>
				.all();
		// end::simpleSelect[]
	}

	/** Fluent select with table override, criteria, sorting, and single-result projection. */
	void fullSelect(R2dbcEntityTemplate template) {

		// tag::fullSelect[]
		Mono<Person> first = template.select(Person.class) // <1>
				.from("other_person")
				.matching(query(where("firstname").is("John") // <2>
						.and("lastname").in("Doe", "White"))
					.sort(by(desc("id")))) // <3>
				.one(); // <4>
		// end::fullSelect[]
	}

	/** Fluent insert of a new entity instance. */
	void insert(R2dbcEntityTemplate template) {

		// tag::insert[]
		Mono<Person> insert = template.insert(Person.class) // <1>
				.using(new Person()); // <2>
		// end::insert[]
	}

	/** Fluent update with table override, criteria, and update assignment. */
	void fluentUpdate(R2dbcEntityTemplate template) {

		// tag::update[]
		Mono<Integer> update = template.update(Person.class) // <1>
				.inTable("other_table") // <2>
				.matching(query(where("firstname").is("John"))) // <3>
				.apply(update("age", 42)); // <4>
		// end::update[]
	}

	/** Fluent delete with table override and criteria. */
	void delete(R2dbcEntityTemplate template) {

		// tag::delete[]
		Mono<Integer> delete = template.delete(Person.class) // <1>
				.from("other_table") // <2>
				.matching(query(where("firstname").is("John"))) // <3>
				.all(); // <4>
		// end::delete[]
	}

	/** Sample entity used by the snippets above. */
	static class Person {

		String firstname, lastname;

		// No-arg constructor required by the insert() snippet above
		// (the original class only declared the two-arg constructor, so
		// "new Person()" did not compile).
		public Person() {}

		public Person(String firstname, String lastname) {
			this.firstname = firstname;
			this.lastname = lastname;
		}
	}
}
Loading…
Cancel
Save