From 9d56934bfe749503a82c697e0717be590ab587ed Mon Sep 17 00:00:00 2001 From: PaulRMellor <47596553+PaulRMellor@users.noreply.github.com> Date: Thu, 6 Jun 2024 16:10:19 +0100 Subject: [PATCH] docs(refactor): updates to the oauth doc following review (#10143) Signed-off-by: prmellor --- ...anaging-policies-permissions-keycloak.adoc | 24 +- .../oauth/assembly-oauth-authentication.adoc | 29 +- .../oauth/assembly-oauth-authorization.adoc | 19 +- .../assembly-oauth-security.adoc | 18 +- documentation/deploying/deploying.adoc | 2 +- .../con-kafka-keycloak-authz-models.adoc | 64 +--- ...eycloak-authz-services-to-kafka-model.adoc | 38 +-- .../con-oauth-authentication-broker.adoc | 323 ++++++++++++------ ...n-oauth-authentication-client-options.adoc | 27 +- .../con-oauth-authentication-client.adoc | 184 +++++++++- .../oauth/con-oauth-authentication-flow.adoc | 108 ------ .../oauth/con-oauth-authorization-intro.adoc | 28 -- .../con-oauth-authorization-mechanism.adoc | 20 -- .../modules/oauth/con-oauth-config.adoc | 20 +- .../oauth/con-oauth-reauthentication.adoc | 30 +- .../oauth/con-oauth-server-examples.adoc | 16 - ...oc-oauth-authentication-broker-config.adoc | 170 ++------- ...roc-oauth-authorization-broker-config.adoc | 37 +- ...-oauth-authorization-keycloak-example.adoc | 16 +- .../oauth/proc-oauth-client-config.adoc | 160 +-------- .../oauth/proc-oauth-kafka-config.adoc | 44 ++- .../oauth/proc-oauth-server-config.adoc | 28 +- ...mple-permissions-for-kafka-operations.adoc | 2 +- documentation/shared/attributes.adoc | 1 - 24 files changed, 591 insertions(+), 817 deletions(-) rename documentation/assemblies/{security => oauth}/assembly-oauth-security.adoc (65%) delete mode 100644 documentation/modules/oauth/con-oauth-authentication-flow.adoc delete mode 100644 documentation/modules/oauth/con-oauth-authorization-intro.adoc delete mode 100644 documentation/modules/oauth/con-oauth-authorization-mechanism.adoc delete mode 100644 
documentation/modules/oauth/con-oauth-server-examples.adoc diff --git a/documentation/assemblies/oauth/assembly-managing-policies-permissions-keycloak.adoc b/documentation/assemblies/oauth/assembly-managing-policies-permissions-keycloak.adoc index 0f332076c5a..00f6951741b 100644 --- a/documentation/assemblies/oauth/assembly-managing-policies-permissions-keycloak.adoc +++ b/documentation/assemblies/oauth/assembly-managing-policies-permissions-keycloak.adoc @@ -1,23 +1,15 @@ -ifdef::context[:parent-context: {context}] +// This assembly is included in the following assemblies: +// +// assembly-oauth-security.adoc -[id="assembly-managing-policies-permissions-keycloak_{context}"] -= Managing policies and permissions in Keycloak Authorization Services - -:context: authz-model +[id="assembly-managing-policies-permissions-keycloak-{context}"] += Setting up permissions in Keycloak [role="_abstract"] -This section describes the authorization models used by Keycloak Authorization Services and Kafka, and defines the important concepts in each model. - -To grant permissions to access Kafka, you can map Keycloak Authorization Services objects to Kafka resources by creating an _OAuth client specification_ in Keycloak. -Kafka permissions are granted to user accounts or service accounts using Keycloak Authorization Services rules. - -xref:ref-example-permissions-for-kafka-operations_{context}[Examples] are shown of the different user permissions required for common Kafka operations, such as creating and listing topics. +When using Keycloak as the OAuth 2.0 authorization server, Kafka permissions are granted to user accounts or service accounts using authorization permissions. +To grant permissions to access Kafka, create an _OAuth client specification_ in Keycloak that maps the authorization models of Keycloak Authorization Services and Kafka. 
include::../../modules/oauth/con-kafka-keycloak-authz-models.adoc[leveloffset=+1] - include::../../modules/oauth/con-mapping-keycloak-authz-services-to-kafka-model.adoc[leveloffset=+1] - include::../../modules/oauth/ref-example-permissions-for-kafka-operations.adoc[leveloffset=+1] - -ifdef::parent-context[:context: {parent-context}] -ifndef::parent-context[:!context:] +include::../../modules/oauth/proc-oauth-authorization-keycloak-example.adoc[leveloffset=+1] \ No newline at end of file diff --git a/documentation/assemblies/oauth/assembly-oauth-authentication.adoc b/documentation/assemblies/oauth/assembly-oauth-authentication.adoc index 123dae3c741..f923ceae32b 100644 --- a/documentation/assemblies/oauth/assembly-oauth-authentication.adoc +++ b/documentation/assemblies/oauth/assembly-oauth-authentication.adoc @@ -1,34 +1,23 @@ // This assembly is included in the following assemblies: // -// assembly-deployment-configuration.adoc +// assembly-oauth-security.adoc [id='assembly-oauth-authentication_{context}'] = Using OAuth 2.0 token-based authentication [role="_abstract"] -Strimzi supports the use of link:https://oauth.net/2/[OAuth 2.0 authentication^] using the _OAUTHBEARER_ and _PLAIN_ mechanisms. -Using OAuth 2.0 authentication, application clients can access resources on application servers (called _resource servers_) without exposing account credentials. - -The application client passes an access token as a means of authenticating, which application servers can also use to determine the level of access to grant. -The authorization server handles the granting of access and inquiries about access. - -In the context of Strimzi: - -* Kafka brokers act as OAuth 2.0 resource servers -* Kafka clients act as OAuth 2.0 application clients - +Strimzi supports the use of link:https://oauth.net/2/[OAuth 2.0^] for token-based authentication. +An OAuth 2.0 authorization server handles the granting of access and inquiries about access. 
Kafka clients authenticate to Kafka brokers. -The brokers and clients communicate with the OAuth 2.0 authorization server, as necessary, to obtain or validate access tokens. +Brokers and clients communicate with the authorization server, as necessary, to obtain or validate access tokens. -For a deployment of Strimzi, OAuth 2.0 integration provides: +For a deployment of Strimzi, OAuth 2.0 integration provides the following support: -* Server-side OAuth 2.0 support for Kafka brokers -* Client-side OAuth 2.0 support for Kafka MirrorMaker, Kafka Connect, and the Kafka Bridge +* Server-side OAuth 2.0 authentication for Kafka brokers +* Client-side OAuth 2.0 authentication for Kafka MirrorMaker, Kafka Connect, and the Kafka Bridge -include::../../modules/oauth/con-oauth-authentication-flow.adoc[leveloffset=+1] include::../../modules/oauth/con-oauth-authentication-broker.adoc[leveloffset=+1] -include::../../modules/oauth/con-oauth-reauthentication.adoc[leveloffset=+1] include::../../modules/oauth/con-oauth-authentication-client.adoc[leveloffset=+1] include::../../modules/oauth/con-oauth-authentication-client-options.adoc[leveloffset=+1] -include::../../modules/oauth/con-oauth-config.adoc[leveloffset=+1] -include::../../modules/oauth/con-oauth-server-examples.adoc[leveloffset=+1] +include::../../modules/oauth/con-oauth-reauthentication.adoc[leveloffset=+1] +include::../../modules/oauth/con-oauth-config.adoc[leveloffset=+1] \ No newline at end of file diff --git a/documentation/assemblies/oauth/assembly-oauth-authorization.adoc b/documentation/assemblies/oauth/assembly-oauth-authorization.adoc index a9fa208f67b..4f2aea2e0db 100644 --- a/documentation/assemblies/oauth/assembly-oauth-authorization.adoc +++ b/documentation/assemblies/oauth/assembly-oauth-authorization.adoc @@ -1,12 +1,21 @@ // This assembly is included in the following assemblies: // -// assembly-deployment-configuration.adoc +// assembly-oauth-security.adoc [id='assembly-oauth-authorization_{context}'] = 
Using OAuth 2.0 token-based authorization -include::../../modules/oauth/con-oauth-authorization-intro.adoc[leveloffset=+1] -include::../../modules/oauth/con-oauth-authorization-mechanism.adoc[leveloffset=+1] +[role="_abstract"] +Strimzi supports the use of OAuth 2.0 token-based authorization through {keycloak-authorization-services}, +which lets you manage security policies and permissions centrally. + +Security policies and permissions defined in Keycloak grant access to Kafka resources. +Users and clients are matched against policies that permit access to perform specific actions on Kafka brokers. + +Kafka allows all users full access to brokers by default, but also provides the `AclAuthorizer` and `StandardAuthorizer` plugins to configure authorization based on Access Control Lists (ACLs). +The ACL rules managed by these plugins are used to grant or deny access to resources based on _username_, and these rules are stored within the Kafka cluster itself. + +However, OAuth 2.0 token-based authorization with Keycloak offers far greater flexibility on how you wish to implement access control to Kafka brokers. +In addition, you can configure your Kafka brokers to use OAuth 2.0 authorization and ACLs. 
+ include::../../modules/oauth/proc-oauth-authorization-broker-config.adoc[leveloffset=+1] -include::assembly-managing-policies-permissions-keycloak.adoc[leveloffset=+1] -include::../../modules/oauth/proc-oauth-authorization-keycloak-example.adoc[leveloffset=+1] diff --git a/documentation/assemblies/security/assembly-oauth-security.adoc b/documentation/assemblies/oauth/assembly-oauth-security.adoc similarity index 65% rename from documentation/assemblies/security/assembly-oauth-security.adoc rename to documentation/assemblies/oauth/assembly-oauth-security.adoc index 66ee116efc0..fda9d4412c1 100644 --- a/documentation/assemblies/security/assembly-oauth-security.adoc +++ b/documentation/assemblies/oauth/assembly-oauth-security.adoc @@ -8,8 +8,22 @@ Kafka brokers and clients both need to be configured to use OAuth 2.0. OAuth 2.0 enables standardized token-based authentication and authorization between applications, using a central authorization server to issue tokens that grant limited access to resources. You can define specific scopes for fine-grained access control. Scopes correspond to different levels of access to Kafka topics or operations within the cluster. + OAuth 2.0 also supports single sign-on and integration with identity providers. -//oauth options +ifdef::Section[] +For more information on using OAuth 2.0, see the link:https://github.com/strimzi/strimzi-kafka-oauth[Strimzi OAuth 2.0 for Apache Kafka project^]. 
+endif::Section[] + +//setting up oauth server +include::../../modules/oauth/proc-oauth-server-config.adoc[leveloffset=+1] + +//oauth authentication include::../oauth/assembly-oauth-authentication.adoc[leveloffset=+1] -include::../oauth/assembly-oauth-authorization.adoc[leveloffset=+1] \ No newline at end of file + +//oauth authorization +include::../oauth/assembly-oauth-authorization.adoc[leveloffset=+1] + +//keycloak authorization +include::../oauth/assembly-managing-policies-permissions-keycloak.adoc[leveloffset=+1] + diff --git a/documentation/deploying/deploying.adoc b/documentation/deploying/deploying.adoc index fe5734f8a06..cafc21b06e5 100644 --- a/documentation/deploying/deploying.adoc +++ b/documentation/deploying/deploying.adoc @@ -37,7 +37,7 @@ include::assemblies/deploying/assembly-deploy-client-access.adoc[leveloffset=+1] //Securing the deployment include::assemblies/security/assembly-securing-access.adoc[leveloffset=+1] //using OAuth -include::assemblies/security/assembly-oauth-security.adoc[leveloffset=+1] +include::assemblies/oauth/assembly-oauth-security.adoc[leveloffset=+1] //managing tls certificates include::assemblies/security/assembly-security.adoc[leveloffset=+1] //security context for all pods diff --git a/documentation/modules/oauth/con-kafka-keycloak-authz-models.adoc b/documentation/modules/oauth/con-kafka-keycloak-authz-models.adoc index 2a33bdd14a6..527bacab043 100644 --- a/documentation/modules/oauth/con-kafka-keycloak-authz-models.adoc +++ b/documentation/modules/oauth/con-kafka-keycloak-authz-models.adoc @@ -1,61 +1,29 @@ [id="con-kafka-keycloak-authz-models_{context}"] -= Kafka and Keycloak authorization models overview += Kafka and Keycloak authorization models [role="_abstract"] - Kafka and Keycloak Authorization Services use different authorization models. 
-[discrete] -== Kafka authorization model +* Kafka's authorization model uses _resource types_ and _operations_ to describe ACLs for the _user_ +* The Keycloak Authorization Services model has four concepts for defining and granting permissions: +** _resources_ +** _authorization scopes_ +** _policies_ +** _permissions_ -Kafka's authorization model uses _resource types_. +.Kafka authorization model When a Kafka client performs an action on a broker, the broker uses the configured `KeycloakAuthorizer` to check the client's permissions, based on the action and resource type. -Kafka uses five resource types to control access: `Topic`, `Group`, `Cluster`, `TransactionalId`, and `DelegationToken`. -Each resource type has a set of available permissions. - -*Topic* - -* `Create` -* `Write` -* `Read` -* `Delete` -* `Describe` -* `DescribeConfigs` -* `Alter` -* `AlterConfigs` - -*Group* - -* `Read` -* `Describe` -* `Delete` - -*Cluster* +Each resource type has a set of available permissions for operations. +For example, the `Topic` resource type has `Create` and `Write` permissions among others. -* `Create` -* `Describe` -* `Alter` -* `DescribeConfigs` -* `AlterConfigs` -* `IdempotentWrite` -* `ClusterAction` +Refer to the https://kafka.apache.org/documentation/#security_authz_primitives[Kafka authorization model] in the Kafka documentation for the full list of resources and permissions. -*TransactionalId* +.Keycloak Authorization Services model -* `Describe` -* `Write` +The Keycloak Authorization Services model defines authorized actions. -*DelegationToken* - -* `Describe` - -[discrete] -== Keycloak Authorization Services model - -The Keycloak Authorization Services model has four concepts for defining and granting permissions: _resources_, _authorization scopes_, _policies_, and _permissions_. - -Resources:: A resource is a set of resource definitions that are used to match resources with permitted actions. +Resources:: Resources are matched with permitted actions. 
A resource might be an individual topic, for example, or all topics with names starting with the same prefix. A resource definition is associated with a set of available authorization scopes, which represent a set of all actions available on the resource. Often, only a subset of these actions is actually permitted. @@ -69,7 +37,3 @@ Policies can match: * _User accounts_ based on username, groups, or roles. Permissions:: A permission grants a subset of authorization scopes on a specific resource definition to a set of users. - -[role="_additional-resources"] -.Additional resources -* link:https://kafka.apache.org/documentation/#security_authz_primitives[Kafka authorization model] diff --git a/documentation/modules/oauth/con-mapping-keycloak-authz-services-to-kafka-model.adoc b/documentation/modules/oauth/con-mapping-keycloak-authz-services-to-kafka-model.adoc index bc9af1a5e94..e21d3864347 100644 --- a/documentation/modules/oauth/con-mapping-keycloak-authz-services-to-kafka-model.adoc +++ b/documentation/modules/oauth/con-mapping-keycloak-authz-services-to-kafka-model.adoc @@ -1,13 +1,12 @@ [id="con-mapping-keycloak-authz-services-to-kafka-model_{context}"] -= Map Keycloak Authorization Services to the Kafka authorization model += Mapping authorization models [role="_abstract"] +The Kafka authorization model is used as a basis for defining the Keycloak roles and resources that control access to Kafka. -The Kafka authorization model is used as a basis for defining the Keycloak roles and resources that will control access to Kafka. - -To grant Kafka permissions to user accounts or service accounts, you first create an _OAuth client specification_ in Keycloak for the Kafka broker. +To grant Kafka permissions to user accounts or service accounts, you first create an _OAuth client specification_ in Keycloak for the Kafka cluster. You then specify Keycloak Authorization Services rules on the client. 
-Typically, the client id of the OAuth client that represents the broker is `kafka`. +Typically, the client ID of the OAuth client that represents the Kafka cluster is `kafka`. The xref:proc-oauth-authorization-keycloak-example_str[example configuration files] provided with Strimzi use `kafka` as the OAuth client id. [NOTE] @@ -19,18 +18,19 @@ However, you can also use different OAuth client ids (for example, `my-cluster-k The `kafka` client definition must have the *Authorization Enabled* option enabled in the Keycloak Admin Console. -All permissions exist within the scope of the `kafka` client. If you have different Kafka clusters configured with different OAuth client IDs, they each need a separate set of permissions even though they're part of the same Keycloak realm. +All permissions exist within the scope of the `kafka` client. +If you have different Kafka clusters configured with different OAuth client IDs, they each need a separate set of permissions even though they're part of the same Keycloak realm. When the Kafka client uses OAUTHBEARER authentication, the Keycloak authorizer (`KeycloakAuthorizer`) uses the access token of the current session to retrieve a list of grants from the Keycloak server. -To retrieve the grants, the authorizer evaluates the Keycloak Authorization Services policies and permissions. +To grant permissions, the authorizer evaluates the grants list (received and cached) from Keycloak Authorization Services based on the access token owner's policies and permissions. -.Authorization scopes for Kafka permissions +.Uploading authorization scopes for Kafka permissions An initial Keycloak configuration usually involves uploading authorization scopes to create a list of all possible actions that can be performed on each Kafka resource type. This step is performed once only, before defining any permissions. You can add authorization scopes manually instead of uploading them. 
-Authorization scopes must contain all the possible Kafka permissions regardless of the resource type: +Authorization scopes should contain the following Kafka permissions regardless of the resource type: * `Create` * `Write` @@ -38,21 +38,23 @@ Authorization scopes must contain all the possible Kafka permissions regardless * `Delete` * `Describe` * `Alter` -* `DescribeConfig` -* `AlterConfig` +* `DescribeConfigs` +* `AlterConfigs` * `ClusterAction` * `IdempotentWrite` -[NOTE] -==== If you're certain you won't need a permission (for example, `IdempotentWrite`), you can omit it from the list of authorization scopes. However, that permission won't be available to target on Kafka resources. + +[NOTE] +==== +The `All` permission is not supported. ==== .Resource patterns for permissions checks Resource patterns are used for pattern matching against the targeted resources when performing permission checks. -The general pattern format is `__RESOURCE-TYPE:PATTERN-NAME__`. +The general pattern format is `:`. The resource types mirror the Kafka authorization model. The pattern allows for two matching options: @@ -69,7 +71,7 @@ Group:orders-* Cluster:* ---- -Additionally, the general pattern format can be prefixed by `kafka-cluster:__CLUSTER-NAME__` followed by a comma, where _CLUSTER-NAME_ refers to the `metadata.name` in the Kafka custom resource. +Additionally, the general pattern format can be prefixed by `kafka-cluster:` followed by a comma, where `` refers to the `metadata.name` in the Kafka custom resource. .Example patterns for resources with cluster prefix [source] @@ -93,7 +95,6 @@ Targeting can refer to: * Specific user or service accounts * Realm roles or client roles * User groups -* JavaScript rules to match a client IP address A policy is given a unique name and can be reused to target multiple permissions to multiple resources. 
@@ -103,8 +104,3 @@ Use fine-grained permissions to pull together the policies, resources, and autho The name of each permission should clearly define which permissions it grants to which users. For example, `Dev Team B can read from topics starting with x`. - -[role="_additional-resources"] .Additional resources - -* For more information about how to configure permissions through Keycloak Authorization Services, see xref:proc-oauth-authorization-keycloak-example_str[]. diff --git a/documentation/modules/oauth/con-oauth-authentication-broker.adoc b/documentation/modules/oauth/con-oauth-authentication-broker.adoc index 00063657717..2ba567a132a 100644 --- a/documentation/modules/oauth/con-oauth-authentication-broker.adoc +++ b/documentation/modules/oauth/con-oauth-authentication-broker.adoc @@ -3,32 +3,20 @@ // assembly-oauth-authentication.adoc [id='con-oauth-authentication-broker-{context}'] -= OAuth 2.0 Kafka broker configuration += Configuring OAuth 2.0 authentication on listeners -Kafka broker configuration for OAuth 2.0 involves: +[role="_abstract"] +To secure Kafka brokers with OAuth 2.0 authentication, configure a listener in the `Kafka` resource to use OAuth 2.0 authentication and a client authentication mechanism, and add further configuration depending on the authentication mechanism and type of token validation used in the authentication. -* Creating the OAuth 2.0 client in the authorization server -* Configuring OAuth 2.0 authentication in the Kafka custom resource +.Configuring listeners to use `oauth` authentication -NOTE: In relation to the authorization server, Kafka brokers and Kafka clients are both regarded as OAuth 2.0 clients. 
- -== OAuth 2.0 client configuration on an authorization server - -To configure a Kafka broker to validate the token received during session initiation, -the recommended approach is to create an OAuth 2.0 _client_ definition in an authorization server, configured as _confidential_, with the following client credentials enabled: - -* Client ID of `kafka` (for example) -* Client ID and Secret as the authentication mechanism - -NOTE: You only need to use a client ID and secret when using a non-public introspection endpoint of the authorization server. -The credentials are not typically required when using public authorization server endpoints, as with fast local JWT token validation. - -== OAuth 2.0 authentication configuration in the Kafka cluster - -To use OAuth 2.0 authentication in the Kafka cluster, you specify, for example, a `tls` listener configuration for your Kafka cluster custom resource with the authentication method `oauth`: +Specify a listener in the `Kafka` resource with an `oauth` authentication type. +You can configure internal and external listeners. +We recommend using OAuth 2.0 authentication together with TLS encryption (`tls: true`). +Without encryption, the connection is vulnerable to network eavesdropping and unauthorized access through token theft. -.Assigining the authentication method type for OAuth 2.0 -[source,yaml,subs="+quotes, attributes"] +.Example listener configuration with OAuth 2.0 authentication +[source,yaml,subs="+attributes"] ---- apiVersion: {KafkaApiVersion} kind: Kafka @@ -41,121 +29,234 @@ spec: type: internal tls: true authentication: - type: *oauth* + type: oauth + - name: external3 + port: 9094 + type: loadbalancer + tls: true + authentication: + type: oauth #... ---- -You can configure OAuth 2.0 authentication in your listeners. -We recommend using OAuth 2.0 authentication together with TLS encryption (`tls: true`). 
-Without encryption, the connection is vulnerable to network eavesdropping and unauthorized access through token theft. - -You configure an `external` listener with `type: oauth` for a secure transport layer to communicate with the client. - -.Using OAuth 2.0 with an external listener -[source,yaml,subs="+quotes"] +.Enabling SASL authentication mechanisms + +Use one or both of the following SASL mechanisms for clients to exchange credentials and establish authenticated sessions with Kafka. + +`OAUTHBEARER`:: Using the `OAUTHBEARER` authentication mechanism, credentials exchange uses a bearer token provided by an OAuth callback handler. +Token provision can be configured to use the following methods: ++ +-- +* Client ID and secret (using the OAuth 2.0 _client credentials mechanism_) +* Long-lived access token +* Long-lived refresh token obtained manually +-- ++ +`OAUTHBEARER` is recommended as it provides a higher level of security than `PLAIN`, though it can only be used by Kafka clients that support the `OAUTHBEARER` mechanism at the protocol level. +Client credentials are never shared with Kafka. + +`PLAIN`:: `PLAIN` is a simple authentication mechanism used by all Kafka client tools. +Consider using `PLAIN` only with Kafka clients that do not support `OAUTHBEARER`. +Using the `PLAIN` authentication mechanism, credentials exchange can be configured to use the following methods: ++ +-- +* Client ID and secret (using the OAuth 2.0 _client credentials mechanism_) +* Long-lived access token + +Regardless of the method used, the client must provide `username` and `password` properties to Kafka. +-- ++ +Credentials are handled centrally behind a compliant authorization server, similar to how `OAUTHBEARER` authentication is used. +The username extraction process depends on the authorization server configuration. + +`OAUTHBEARER` is automatically enabled in the `oauth` listener configuration for the Kafka broker. 
+To use the `PLAIN` mechanism, you must set the `enablePlain` property to `true`. + +In the following example, the `PLAIN` mechanism is enabled, and the `OAUTHBEARER` mechanism is disabled on a listener using the `enableOauthBearer` property. + +.Example listener configuration for the `PLAIN` mechanism +[source,yaml,subs="+attributes"] ---- -# ... -listeners: - - name: external3 - port: 9094 - type: loadbalancer - tls: true - authentication: - type: *oauth* - #... +apiVersion: {KafkaApiVersion} +kind: Kafka +spec: + kafka: + # ... + listeners: + - name: tls + port: 9093 + type: internal + tls: true + authentication: + type: oauth + - name: external3 + port: 9094 + type: loadbalancer + tls: true + authentication: + type: oauth + enablePlain: true + enableOauthBearer: false + #... ---- -The `tls` property is _false_ by default, so it must be enabled. - -When you have defined the type of authentication as OAuth 2.0, you add configuration based on the type of validation, either as xref:con-oauth-authentication-broker-fast-local[fast local JWT validation] or xref:con-oauth-authentication-broker-intro-local[token validation using an introspection endpoint]. - -The procedure to configure OAuth 2.0 for listeners, with descriptions and examples, is described in xref:proc-oauth-authentication-broker-config-{context}[Configuring OAuth 2.0 support for Kafka brokers]. +When you have defined the type of authentication as OAuth 2.0, you add configuration based on the type of validation, either as fast local JWT validation or token validation using an introspection endpoint. -[[con-oauth-authentication-broker-fast-local]] -== Fast local JWT token validation configuration +[id='con-oauth-authentication-broker-jwt-{context}'] +.Configuring fast local JWT token validation -Fast local JWT token validation checks a JWT token signature locally. 
+Fast local JWT token validation involves checking a JWT token signature locally to ensure that the token meets the following criteria: -The local check ensures that a token: - -* Conforms to type by containing a (_typ_) claim value of `Bearer` for an access token -* Is valid (not expired) +* Contains a `typ` (type) or `token_type` header claim value of `Bearer` to indicate it is an access token +* Is currently valid and not expired * Has an issuer that matches a `validIssuerURI` You specify a `validIssuerURI` attribute when you configure the listener, so that any tokens not issued by the authorization server are rejected. -The authorization server does not need to be contacted during fast local JWT token validation. -You activate fast local JWT token validation by specifying a `jwksEndpointUri` attribute, the endpoint exposed by the OAuth 2.0 authorization server. +The authorization server does not need to be contacted during fast local JWT token validation. +You activate fast local JWT token validation by specifying a `jwksEndpointUri` attribute, the endpoint exposed by the OAuth 2.0 authorization server. The endpoint contains the public keys used to validate signed JWT tokens, which are sent as credentials by Kafka clients. -NOTE: All communication with the authorization server should be performed using TLS encryption. - -You can configure a certificate truststore as a Kubernetes Secret in your Strimzi project namespace, and use a `tlsTrustedCertificates` attribute to point to the Kubernetes Secret containing the truststore file. +All communication with the authorization server should be performed using TLS encryption. +You can configure a certificate truststore as a Kubernetes `Secret` in your Strimzi project namespace, and use a `tlsTrustedCertificates` attribute to point to the Kubernetes Secret containing the truststore file. -You might want to configure a `userNameClaim` to properly extract a username from the JWT token. 
-If required, you can use a JsonPath expression like `"['user.info'].['user.id']"` to retrieve the username from nested JSON attributes within a token. +You might want to configure a `userNameClaim` to properly extract a username from the JWT token. +If required, you can use a JsonPath expression like `"['user.info'].['user.id']"` to retrieve the username from nested JSON attributes within a token. -If you want to use Kafka ACL authorization, you need to identify the user by their username during authentication. -(The `sub` claim in JWT tokens is typically a unique ID, not a username.) +If you want to use Kafka ACL authorization, identify the user by their username during authentication. (The `sub` claim in JWT tokens is typically a unique ID, not a username.) .Example configuration for fast local JWT token validation -[source,yaml,subs="+quotes, attributes"] +[source,yaml,subs="+quotes,attributes"] ---- -apiVersion: {KafkaApiVersion} -kind: Kafka -spec: - kafka: - #... - listeners: - - name: tls - port: 9093 - type: internal - tls: true - authentication: - type: *oauth* - validIssuerUri: /auth/realms/tls> - jwksEndpointUri: /auth/realms/tls/protocol/openid-connect/certs> - userNameClaim: preferred_username - maxSecondsWithoutReauthentication: 3600 - tlsTrustedCertificates: - - secretName: oauth-server-cert - certificate: ca.crt - #... +#... 
+- name: external3 + port: 9094 + type: loadbalancer + tls: true + authentication: + type: oauth # <1> + validIssuerUri: https:/// # <2> + jwksEndpointUri: https:/// # <3> + userNameClaim: preferred_username # <4> + maxSecondsWithoutReauthentication: 3600 # <5> + tlsTrustedCertificates: # <6> + - secretName: oauth-server-cert + certificate: ca.crt + disableTlsHostnameVerification: true # <7> + jwksExpirySeconds: 360 # <8> + jwksRefreshSeconds: 300 # <9> + jwksMinRefreshPauseSeconds: 1 # <10> ---- - -[[con-oauth-authentication-broker-intro-local]] -== OAuth 2.0 introspection endpoint configuration - -Token validation using an OAuth 2.0 introspection endpoint treats a received access token as opaque. -The Kafka broker sends an access token to the introspection endpoint, which responds with the token information necessary for validation. +<1> Listener type set to `oauth`. +<2> URI of the token issuer used for authentication. +<3> URI of the JWKS certificate endpoint used for local JWT validation. +<4> The token claim (or key) that contains the actual username used to identify the user. Its value depends on the authorization server. If necessary, a JsonPath expression like `"['user.info'].['user.id']"` can be used to retrieve the username from nested JSON attributes within a token. +<5> (Optional) Activates the Kafka re-authentication mechanism that enforces session expiry to the same length of time as the access token. If the specified value is less than the time left for the access token to expire, then the client will have to re-authenticate before the actual token expiry. By default, the session does not expire when the access token expires, and the client does not attempt re-authentication. +<6> (Optional) Trusted certificates for TLS connection to the authorization server. +<7> (Optional) Disable TLS hostname verification. Default is `false`. +<8> The duration the JWKS certificates are considered valid before they expire. Default is `360` seconds. 
If you specify a longer time, consider the risk of allowing access to revoked certificates.
+<9> The period between refreshes of JWKS certificates. The interval must be at least 60 seconds shorter than the expiry interval. Default is `300` seconds.
+<10> The minimum pause in seconds between consecutive attempts to refresh JWKS public keys. When an unknown signing key is encountered, the JWKS keys refresh is scheduled outside the regular periodic schedule with at least the specified pause since the last refresh attempt. The refreshing of keys follows the rule of exponential backoff, retrying on unsuccessful refreshes with ever increasing pause, until it reaches `jwksRefreshSeconds`. The default value is `1`.
+
+
+[id='con-oauth-authentication-broker-intro-{context}']
+.Configuring token validation using an introspection endpoint
+
+Token validation using an OAuth 2.0 introspection endpoint treats a received access token as opaque. The Kafka broker sends an access token to the introspection endpoint, which responds with the token information necessary for validation.
 Importantly, it returns up-to-date information if the specific access token is valid, and also information about when the token expires.
 
-To configure OAuth 2.0 introspection-based validation, you specify an `introspectionEndpointUri` attribute rather than the `jwksEndpointUri` attribute specified for fast local JWT token validation.
+To configure OAuth 2.0 introspection-based validation, you specify an `introspectionEndpointUri` attribute rather than the `jwksEndpointUri` attribute specified for fast local JWT token validation.
 Depending on the authorization server, you typically have to specify a `clientId` and `clientSecret`, because the introspection endpoint is usually protected.
-.Example configuration for an introspection endpoint -[source,yaml,subs="+quotes, attributes"] +.Example token validation configuration using an introspection endpoint +[source,yaml,subs="+quotes,attributes"] ---- -apiVersion: {KafkaApiVersion} -kind: Kafka -spec: - kafka: - listeners: - - name: tls - port: 9093 - type: internal - tls: true - authentication: - type: *oauth* - clientId: kafka-broker - clientSecret: - secretName: my-cluster-oauth - key: clientSecret - validIssuerUri: /auth/realms/tls> - introspectionEndpointUri: /auth/realms/tls/protocol/openid-connect/token/introspect> - userNameClaim: preferred_username - maxSecondsWithoutReauthentication: 3600 - tlsTrustedCertificates: +- name: external3 + port: 9094 + type: loadbalancer + tls: true + authentication: + type: oauth + validIssuerUri: https:/// + introspectionEndpointUri: https:/// # <1> + clientId: kafka-broker # <2> + clientSecret: # <3> + secretName: my-cluster-oauth + key: clientSecret + userNameClaim: preferred_username # <4> + maxSecondsWithoutReauthentication: 3600 # <5> + tlsTrustedCertificates: - secretName: oauth-server-cert certificate: ca.crt ---- +<1> URI of the token introspection endpoint. +<2> Client ID to identify the client. +<3> Client Secret and client ID is used for authentication. +<4> The token claim (or key) that contains the actual username used to identify the user. Its value depends on the authorization server. If necessary, a JsonPath expression like `"['user.info'].['user.id']"` can be used to retrieve the username from nested JSON attributes within a token. +<5> (Optional) Activates the Kafka re-authentication mechanism that enforces session expiry to the same length of time as the access token. If the specified value is less than the time left for the access token to expire, then the client will have to re-authenticate before the actual token expiry. 
By default, the session does not expire when the access token expires, and the client does not attempt re-authentication.
+
+[id='con-oauth-authentication-broker-additional-{context}']
+.Including additional configuration options
+
+Specify additional settings depending on the authentication requirements and the authorization server you are using.
+Some of these properties apply only to certain authentication mechanisms or when used in combination with other properties.
+
+For example, when using OAuth over `PLAIN`, access tokens are passed as `password` property values with or without an `$accessToken:` prefix.
+
+* If you configure a token endpoint (`tokenEndpointUri`) in the listener configuration, you need the prefix.
+* If you don't configure a token endpoint in the listener configuration, you don't need the prefix.
+The Kafka broker interprets the password as a raw access token.
+
+If the `password` is set as the access token, the `username` must be set to the same principal name that the Kafka broker obtains from the access token.
+You can specify username extraction options in your listener using the `userNameClaim`, `fallbackUserNameClaim`, `fallbackUserNamePrefix`, and `userInfoEndpointUri` properties.
+The username extraction process also depends on your authorization server; in particular, how it maps client IDs to account names.
+
+NOTE: The `PLAIN` mechanism does not support password grant authentication.
+Use either client credentials (client ID + secret) or an access token for authentication.
+
+.Example additional configuration settings
+[source,yaml,subs="+quotes,attributes"]
+----
+  # ...
+  authentication:
+    type: oauth
+    # ...
+    checkIssuer: false # <1>
+    checkAudience: true # <2>
+    fallbackUserNameClaim: client_id # <3>
+    fallbackUserNamePrefix: client-account- # <4>
+    validTokenType: bearer # <5>
+    userInfoEndpointUri: https:/// # <6>
+    enableOauthBearer: false # <7>
+    enablePlain: true # <8>
+    tokenEndpointUri: https:/// # <9>
+    customClaimCheck: "@.custom == 'custom-value'" # <10>
+    clientAudience: audience # <11>
+    clientScope: scope # <12>
+    connectTimeoutSeconds: 60 # <13>
+    readTimeoutSeconds: 60 # <14>
+    httpRetries: 2 # <15>
+    httpRetryPauseMs: 300 # <16>
+    groupsClaim: "$.groups" # <17>
+    groupsClaimDelimiter: "," # <18>
+    includeAcceptHeader: false # <19>
+----
+<1> If your authorization server does not provide an `iss` claim, it is not possible to perform an issuer check. In this situation, set `checkIssuer` to `false` and do not specify a `validIssuerUri`. Default is `true`.
+<2> If your authorization server provides an `aud` (audience) claim, and you want to enforce an audience check, set `checkAudience` to `true`. Audience checks identify the intended recipients of tokens. As a result, the Kafka broker will reject tokens that do not have its `clientId` in their `aud` claim. Default is `false`.
+<3> An authorization server may not provide a single attribute to identify both regular users and clients. When a client authenticates in its own name, the server might provide a _client ID_. When a user authenticates using a username and password to obtain a refresh token or an access token, the server might provide a _username_ attribute in addition to a client ID. Use this fallback option to specify the username claim (attribute) to use if a primary user ID attribute is not available. If necessary, a JsonPath expression like `"['client.info'].['client.id']"` can be used to retrieve the fallback username from nested JSON attributes within a token.
+<4> In situations where `fallbackUserNameClaim` is applicable, it may also be necessary to prevent name collisions between the values of the username claim, and those of the fallback username claim. Consider a situation where a client called `producer` exists, but also a regular user called `producer` exists. In order to differentiate between the two, you can use this property to add a prefix to the user ID of the client. +<5> (Only applicable when using `introspectionEndpointUri`) Depending on the authorization server you are using, the introspection endpoint may or may not return the _token type_ attribute, or it may contain different values. You can specify a valid token type value that the response from the introspection endpoint has to contain. +<6> (Only applicable when using `introspectionEndpointUri`) The authorization server may be configured or implemented in such a way to not provide any identifiable information in an introspection endpoint response. In order to obtain the user ID, you can configure the URI of the `userinfo` endpoint as a fallback. The `userNameClaim`, `fallbackUserNameClaim`, and `fallbackUserNamePrefix` settings are applied to the response of `userinfo` endpoint. +<7> Set this to `false` to disable the `OAUTHBEARER` mechanism on the listener. At least one of `PLAIN` or `OAUTHBEARER` has to be enabled. Default is `true`. +<8> Set to `true` to enable `PLAIN` authentication on the listener, which is supported for clients on all platforms. +<9> Additional configuration for the `PLAIN` mechanism. If specified, clients can authenticate over `PLAIN` by passing an access token as the `password` using an `$accessToken:` prefix. +For production, always use `https://` urls. +<10> Additional custom rules can be imposed on the JWT access token during validation by setting this to a JsonPath filter query. If the access token does not contain the necessary data, it is rejected. 
When using the `introspectionEndpointUri`, the custom check is applied to the introspection endpoint response JSON. +<11> An `audience` parameter passed to the token endpoint. An _audience_ is used when obtaining an access token for inter-broker authentication. It is also used in the name of a client for OAuth 2.0 over `PLAIN` client authentication using a `clientId` and `secret`. This only affects the ability to obtain the token, and the content of the token, depending on the authorization server. It does not affect token validation rules by the listener. +<12> A `scope` parameter passed to the token endpoint. A _scope_ is used when obtaining an access token for inter-broker authentication. It is also used in the name of a client for OAuth 2.0 over `PLAIN` client authentication using a `clientId` and `secret`. This only affects the ability to obtain the token, and the content of the token, depending on the authorization server. It does not affect token validation rules by the listener. +<13> The connect timeout in seconds when connecting to the authorization server. The default value is 60. +<14> The read timeout in seconds when connecting to the authorization server. The default value is 60. +<15> The maximum number of times to retry a failed HTTP request to the authorization server. The default value is `0`, meaning that no retries are performed. To use this option effectively, consider reducing the timeout times for the `connectTimeoutSeconds` and `readTimeoutSeconds` options. However, note that retries may prevent the current worker thread from being available to other requests, and if too many requests stall, it could make the Kafka broker unresponsive. +<16> The time to wait before attempting another retry of a failed HTTP request to the authorization server. By default, this time is set to zero, meaning that no pause is applied. 
This is because many issues that cause failed requests are per-request network glitches or proxy issues that can be resolved quickly. However, if your authorization server is under stress or experiencing high traffic, you may want to set this option to a value of 100 ms or more to reduce the load on the server and increase the likelihood of successful retries. +<17> A JsonPath query that is used to extract groups information from either the JWT token or the introspection endpoint response. This option is not set by default. By configuring this option, a custom authorizer can make authorization decisions based on user groups. +<18> A delimiter used to parse groups information when it is returned as a single delimited string. The default value is ',' (comma). +<19> Some authorization servers have issues with client sending `Accept: application/json` header. By setting `includeAcceptHeader: false` the header will not be sent. Default is `true`. \ No newline at end of file diff --git a/documentation/modules/oauth/con-oauth-authentication-client-options.adoc b/documentation/modules/oauth/con-oauth-authentication-client-options.adoc index 6d7b72dacf2..b0c81318b27 100644 --- a/documentation/modules/oauth/con-oauth-authentication-client-options.adoc +++ b/documentation/modules/oauth/con-oauth-authentication-client-options.adoc @@ -12,8 +12,8 @@ The flows must also be supported by the authorization server used. The Kafka broker listener configuration determines how clients authenticate using an access token. The client can pass a client ID and secret to request an access token. -If a listener is configured to use PLAIN authentication, the client can authenticate with a client ID and secret or username and access token. -These values are passed as the `username` and `password` properties of the PLAIN mechanism. +If a listener is configured to use `PLAIN` authentication, the client can authenticate with a client ID and secret or username and access token. 
+These values are passed as the `username` and `password` properties of the `PLAIN` mechanism. Listener configuration supports the following token validation options: @@ -30,14 +30,9 @@ Kafka client credentials can also be configured for the following types of authe * Direct local access using a previously generated long-lived access token * Contact with the authorization server for a new access token to be issued (using a client ID and a secret, or a refresh token, or a username and a password) -== Example client authentication flows using the SASL OAUTHBEARER mechanism +== Example client authentication flows using the SASL `OAUTHBEARER` mechanism -You can use the following communication flows for Kafka authentication using the SASL OAUTHBEARER mechanism. - -* xref:oauth-introspection-endpoint-{context}[Client using client ID and secret, with broker delegating validation to authorization server] -* xref:oauth-jwt-{context}[Client using client ID and secret, with broker performing fast local token validation] -* xref:oauth-token-endpoint-{context}[Client using long-lived access token, with broker delegating validation to authorization server] -* xref:oauth-token-jwt-{context}[Client using long-lived access token, with broker performing fast local validation] +You can use the following communication flows for Kafka authentication using the SASL `OAUTHBEARER` mechanism. [id='oauth-introspection-endpoint-{context}'] .Client using client ID and secret, with broker delegating validation to authorization server @@ -46,7 +41,7 @@ image:oauth-introspection-endpoint.png[Client using client ID and secret with br . The Kafka client requests an access token from the authorization server using a client ID and secret, and optionally a refresh token. Alternatively, the client may authenticate using a username and a password. . The authorization server generates a new access token. -. 
The Kafka client authenticates with the Kafka broker using the SASL OAUTHBEARER mechanism to pass the access token. +. The Kafka client authenticates with the Kafka broker using the SASL `OAUTHBEARER` mechanism to pass the access token. . The Kafka broker validates the access token by calling a token introspection endpoint on the authorization server using its own client ID and secret. . A Kafka client session is established if the token is valid. @@ -57,7 +52,7 @@ image:oauth-jwt-signature.png[Client using client ID and secret with broker perf . The Kafka client authenticates with the authorization server from the token endpoint, using a client ID and secret, and optionally a refresh token. Alternatively, the client may authenticate using a username and a password. . The authorization server generates a new access token. -. The Kafka client authenticates with the Kafka broker using the SASL OAUTHBEARER mechanism to pass the access token. +. The Kafka client authenticates with the Kafka broker using the SASL `OAUTHBEARER` mechanism to pass the access token. . The Kafka broker validates the access token locally using a JWT token signature check, and local token introspection. [id='oauth-token-endpoint-{context}'] @@ -65,7 +60,7 @@ image:oauth-jwt-signature.png[Client using client ID and secret with broker perf image:oauth-introspection-endpoint-long-token.png[Client using long-lived access token with broker delegating validation to authorization server] -. The Kafka client authenticates with the Kafka broker using the SASL OAUTHBEARER mechanism to pass the long-lived access token. +. The Kafka client authenticates with the Kafka broker using the SASL `OAUTHBEARER` mechanism to pass the long-lived access token. . The Kafka broker validates the access token by calling a token introspection endpoint on the authorization server, using its own client ID and secret. . A Kafka client session is established if the token is valid. 
@@ -74,19 +69,17 @@ image:oauth-introspection-endpoint-long-token.png[Client using long-lived access image:oauth-jwt-signature-token.png[Client using long-lived access token with broker performing fast local validation] -. The Kafka client authenticates with the Kafka broker using the SASL OAUTHBEARER mechanism to pass the long-lived access token. +. The Kafka client authenticates with the Kafka broker using the SASL `OAUTHBEARER` mechanism to pass the long-lived access token. . The Kafka broker validates the access token locally using a JWT token signature check and local token introspection. WARNING: Fast local JWT token signature validation is suitable only for short-lived tokens as there is no check with the authorization server if a token has been revoked. Token expiration is written into the token, but revocation can happen at any time, so cannot be accounted for without contacting the authorization server. Any issued token would be considered valid until it expires. -== Example client authentication flows using the SASL PLAIN mechanism +== Example client authentication flows using the SASL `PLAIN` mechanism -You can use the following communication flows for Kafka authentication using the OAuth PLAIN mechanism. +You can use the following communication flows for Kafka authentication using the OAuth `PLAIN` mechanism. 
-* xref:oauth-plain-client-id-{context}[Client using a client ID and secret, with the broker obtaining the access token for the client] -* xref:oauth-plain-access-token-{context}[Client using a long-lived access token without a client ID and secret] [id='oauth-plain-client-id-{context}'] .Client using a client ID and secret, with the broker obtaining the access token for the client diff --git a/documentation/modules/oauth/con-oauth-authentication-client.adoc b/documentation/modules/oauth/con-oauth-authentication-client.adoc index 515058bdca0..ef1461f35e9 100644 --- a/documentation/modules/oauth/con-oauth-authentication-client.adoc +++ b/documentation/modules/oauth/con-oauth-authentication-client.adoc @@ -3,26 +3,186 @@ // assembly-oauth-authentication.adoc [id='con-oauth-authentication-client-{context}'] -= OAuth 2.0 Kafka client configuration += Configuring OAuth 2.0 on client applications -A Kafka client is configured with either: +[role="_abstract"] +To configure OAuth 2.0 on client applications, you must specify the following: -* The credentials required to obtain a valid access token from an authorization server (client ID and Secret) -* A valid long-lived access token or refresh token, obtained using tools provided by an authorization server +* SASL (Simple Authentication and Security Layer) security protocols +* SASL mechanisms +* A JAAS (Java Authentication and Authorization Service) module +* Authentication properties to access the authorization server -The only information ever sent to the Kafka broker is an access token. -The credentials used to authenticate with the authorization server to obtain the access token are never sent to the broker. +.Configuring SASL protocols +Specify SASL protocols in the client configuration: + +* `SASL_SSL` for authentication over TLS encrypted connections +* `SASL_PLAINTEXT` for authentication over unencrypted connections + +Use `SASL_SSL` for production and `SASL_PLAINTEXT` for local development only. 
+ +When using `SASL_SSL`, additional `ssl.truststore` configuration is needed. +The truststore configuration is required for secure connection (`https://`) to the OAuth 2.0 authorization server. +To verify the OAuth 2.0 authorization server, add the CA certificate for the authorization server to the truststore in your client configuration. +You can configure a truststore in PEM or PKCS #12 format. + +.Configuring SASL authentication mechanisms + +Specify SASL mechanisms in the client configuration: + +* `OAUTHBEARER` for credentials exchange using a bearer token +* `PLAIN` to pass client credentials (clientId + secret) or an access token + +.Configuring a JAAS module + +Specify a JAAS module that implements the SASL authentication mechanism as a `sasl.jaas.config` property value: + +* `org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule` implements the `OAUTHBEARER` mechanism +* `org.apache.kafka.common.security.plain.PlainLoginModule` implements the `PLAIN` mechanism + +NOTE: For the `OAUTHBEARER` mechanism, Strimzi provides a callback handler for clients that use Kafka Client Java libraries to enable credentials exchange. +For clients in other languages, custom code may be required to obtain the access token. +For the `PLAIN` mechanism, Strimzi provides server-side callbacks to enable credentials exchange. + +To be able to use the `OAUTHBEARER` mechanism, you must also add the custom `io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler` class as the callback handler. +`JaasClientOauthLoginCallbackHandler` handles OAuth callbacks to the authorization server for access tokens during client login. +This enables automatic token renewal, ensuring continuous authentication without user intervention. +Additionally, it handles login credentials for clients using the OAuth 2.0 password grant method. + +.Configuring authentication properties + +Configure the client to use credentials or access tokens for OAuth 2.0 authentication. 
+ +Using client credentials:: Using client credentials involves configuring the client with the necessary credentials (client ID and secret) to obtain a valid access token from an authorization server. This is the simplest mechanism. +Using access tokens:: Using access tokens, the client is configured with a valid long-lived access token or refresh token obtained from an authorization server. +Using access tokens adds more complexity because there is an additional dependency on authorization server tools. +If you are using long-lived access tokens, you may need to configure the client in the authorization server to increase the maximum lifetime of the token. + +The only information ever sent to Kafka is the access token. +The credentials used to obtain the token are never sent to Kafka. When a client obtains an access token, no further communication with the authorization server is needed. -The simplest mechanism is authentication with a client ID and Secret. -Using a long-lived access token, or a long-lived refresh token, adds more complexity because there is an additional dependency on authorization server tools. +SASL authentication properties support the following authentication methods: + +* OAuth 2.0 client credentials +* OAuth 2.0 password grant (deprecated) +* Access token +* Refresh token -NOTE: If you are using long-lived access tokens, you may need to configure the client in the authorization server to increase the maximum lifetime of the token. +Add the authentication properties as JAAS configuration (`sasl.jaas.config` and `sasl.login.callback.handler.class`). -If the Kafka client is not configured with an access token directly, the client exchanges credentials for an access token during Kafka session initiation by contacting the authorization server. 
-The Kafka client exchanges either: +If the client application is not configured with an access token directly, the client exchanges one of the following sets of credentials for an access token during Kafka session initiation: -* Client ID and Secret +* Client ID and secret * Client ID, refresh token, and (optionally) a secret * Username and password, with client ID and (optionally) a secret + +NOTE: You can also specify authentication properties as environment variables, or as Java system properties. +For Java system properties, you can set them using `setProperty` and pass them on the command line using the `-D` option. + +[id='con-oauth-authentication-client-credentials-{context}'] +.Example client credentials configuration +[source,properties,subs="+quotes,attributes"] +---- +security.protocol=SASL_SSL # <1> +sasl.mechanism=OAUTHBEARER # <2> +ssl.truststore.location=/tmp/truststore.p12 <3> +ssl.truststore.password=$STOREPASS +ssl.truststore.type=PKCS12 +sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + oauth.token.endpoint.uri="" \ # <4> + oauth.client.id="" \ # <5> + oauth.client.secret="" \ # <6> + oauth.ssl.truststore.location="/tmp/oauth-truststore.p12" \ <7> + oauth.ssl.truststore.password="$STOREPASS" \ <8> + oauth.ssl.truststore.type="PKCS12" \ <9> + oauth.scope="" \ # <10> + oauth.audience="" ; # <11> +sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler +---- +<1> `SASL_SSL` security protocol for TLS-encrypted connections. Use `SASL_PLAINTEXT` over unencrypted connections for local development only. +<2> The SASL mechanism specified as `OAUTHBEARER` or `PLAIN`. +<3> The truststore configuration for secure access to the Kafka cluster. +<4> URI of the authorization server token endpoint. +<5> Client ID, which is the name used when creating the _client_ in the authorization server. 
+<6> Client secret created when creating the _client_ in the authorization server. +<7> The location contains the public key certificate (`truststore.p12`) for the authorization server. +<8> The password for accessing the truststore. +<9> The truststore type. +<10> (Optional) The `scope` for requesting the token from the token endpoint. +An authorization server may require a client to specify the scope. +<11> (Optional) The `audience` for requesting the token from the token endpoint. +An authorization server may require a client to specify the audience. + +[id='con-oauth-authentication-password-grants-{context}'] +.Example password grants configuration +[source,properties,subs="+quotes,attributes"] +---- +security.protocol=SASL_SSL +sasl.mechanism=OAUTHBEARER +ssl.truststore.location=/tmp/truststore.p12 +ssl.truststore.password=$STOREPASS +ssl.truststore.type=PKCS12 +sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + oauth.token.endpoint.uri="" \ + oauth.client.id="" \ # <1> + oauth.client.secret="" \ # <2> + oauth.password.grant.username="" \ # <3> + oauth.password.grant.password="" \ # <4> + oauth.ssl.truststore.location="/tmp/oauth-truststore.p12" \ + oauth.ssl.truststore.password="$STOREPASS" \ + oauth.ssl.truststore.type="PKCS12" \ + oauth.scope="" \ + oauth.audience="" ; +sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler +---- +<1> Client ID, which is the name used when creating the _client_ in the authorization server. +<2> (Optional) Client secret created when creating the _client_ in the authorization server. +<3> Username for password grant authentication. OAuth password grant configuration (username and password) uses the OAuth 2.0 password grant method. To use password grants, create a user account for a client on your authorization server with limited permissions. The account should act like a service account. 
Use in environments where user accounts are required for authentication, but consider using a refresh token first. +<4> Password for password grant authentication. ++ +NOTE: SASL `PLAIN` does not support passing a username and password (password grants) using the OAuth 2.0 password grant method. + +[id='con-oauth-authentication-access-token-{context}'] +.Example access token configuration +[source,properties,subs="+quotes,attributes"] +---- +security.protocol=SASL_SSL +sasl.mechanism=OAUTHBEARER +ssl.truststore.location=/tmp/truststore.p12 +ssl.truststore.password=$STOREPASS +ssl.truststore.type=PKCS12 +sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + oauth.token.endpoint.uri="" \ + oauth.access.token="" \ # <1> + oauth.ssl.truststore.location="/tmp/oauth-truststore.p12" \ + oauth.ssl.truststore.password="$STOREPASS" \ + oauth.ssl.truststore.type="PKCS12" ; +sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler +---- +<1> Long-lived access token for Kafka clients. + +[id='con-oauth-authentication-refresh-token-{context}'] +.Example refresh token configuration +[source,properties,subs="+quotes,attributes"] +---- +security.protocol=SASL_SSL +sasl.mechanism=OAUTHBEARER +ssl.truststore.location=/tmp/truststore.p12 +ssl.truststore.password=$STOREPASS +ssl.truststore.type=PKCS12 +sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + oauth.token.endpoint.uri="" \ + oauth.client.id="" \ # <1> + oauth.client.secret="" \ # <2> + oauth.refresh.token="" \ # <3> + oauth.ssl.truststore.location="/tmp/oauth-truststore.p12" \ + oauth.ssl.truststore.password="$STOREPASS" \ + oauth.ssl.truststore.type="PKCS12" ; +sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler +---- +<1> Client ID, which is the name used when creating the _client_ in the authorization server. 
+<2> (Optional) Client secret created when creating the _client_ in the authorization server. +<3> Long-lived refresh token for Kafka clients. + diff --git a/documentation/modules/oauth/con-oauth-authentication-flow.adoc b/documentation/modules/oauth/con-oauth-authentication-flow.adoc deleted file mode 100644 index c986c745566..00000000000 --- a/documentation/modules/oauth/con-oauth-authentication-flow.adoc +++ /dev/null @@ -1,108 +0,0 @@ -// Module included in the following assemblies: -// -// assembly-oauth-authentication.adoc - -[id='con-oauth-authentication-flow-{context}'] -= OAuth 2.0 authentication mechanisms - -[role="_abstract"] -Strimzi supports the OAUTHBEARER and PLAIN mechanisms for OAuth 2.0 authentication. -Both mechanisms allow Kafka clients to establish authenticated sessions with Kafka brokers. -The authentication flow between clients, the authorization server, and Kafka brokers is different for each mechanism. - -We recommend that you configure clients to use OAUTHBEARER whenever possible. -OAUTHBEARER provides a higher level of security than PLAIN because client credentials are _never_ shared with Kafka brokers. -Consider using PLAIN only with Kafka clients that do not support OAUTHBEARER. - -You configure Kafka broker listeners to use OAuth 2.0 authentication for connecting clients. -If necessary, you can use the OAUTHBEARER and PLAIN mechanisms on the same `oauth` listener. -The properties to support each mechanism must be explicitly specified in the `oauth` listener configuration. - -.OAUTHBEARER overview - -OAUTHBEARER is automatically enabled in the `oauth` listener configuration for the Kafka broker. -You can set the `enableOauthBearer` property to `true`, though this is not required. - -[source,yaml,subs="attributes+"] ----- - # ... - authentication: - type: oauth - # ... - enableOauthBearer: true ----- - -Many Kafka client tools use libraries that provide basic support for OAUTHBEARER at the protocol level. 
-To support application development, Strimzi provides an _OAuth callback handler_ for the upstream Kafka Client Java libraries (but not for other libraries). -Therefore, you do not need to write your own callback handlers. -An application client can use the callback handler to provide the access token. -Clients written in other languages, such as Go, must use custom code to connect to the authorization server and obtain the access token. - -With OAUTHBEARER, the client initiates a session with the Kafka broker for credentials exchange, where credentials take the form of a bearer token provided by the callback handler. -Using the callbacks, you can configure token provision in one of three ways: - -* Client ID and Secret (by using the _OAuth 2.0 client credentials_ mechanism) - -* A long-lived access token, obtained manually at configuration time - -* A long-lived refresh token, obtained manually at configuration time - -[NOTE] -==== -OAUTHBEARER authentication can only be used by Kafka clients that support the OAUTHBEARER mechanism at the protocol level. -==== - -.PLAIN overview - -To use PLAIN, you must enable it in the `oauth` listener configuration for the Kafka broker. - -In the following example, PLAIN is enabled in addition to OAUTHBEARER, which is enabled by default. -If you want to use PLAIN only, you can disable OAUTHBEARER by setting `enableOauthBearer` to `false`. - -[source,yaml,subs="+quotes,attributes+"] ----- - # ... - authentication: - type: oauth - # ... - enablePlain: true - tokenEndpointUri: https://_OAUTH-SERVER-ADDRESS_/auth/realms/external/protocol/openid-connect/token ----- - -PLAIN is a simple authentication mechanism used by all Kafka client tools. -To enable PLAIN to be used with OAuth 2.0 authentication, Strimzi provides _OAuth 2.0 over PLAIN_ server-side callbacks. - -With the Strimzi implementation of PLAIN, the client credentials are not stored in ZooKeeper. 
-Instead, client credentials are handled centrally behind a compliant authorization server, similar to when OAUTHBEARER authentication is used. - -When used with the OAuth 2.0 over PLAIN callbacks, Kafka clients authenticate with Kafka brokers using either of the following methods: - -* Client ID and secret (by using the OAuth 2.0 client credentials mechanism) - -* A long-lived access token, obtained manually at configuration time - -For both methods, the client must provide the PLAIN `username` and `password` properties to pass credentials to the Kafka broker. -The client uses these properties to pass a client ID and secret or username and access token. - -Client IDs and secrets are used to obtain access tokens. - -Access tokens are passed as `password` property values. -You pass the access token with or without an `$accessToken:` prefix. - -* If you configure a token endpoint (`tokenEndpointUri`) in the listener configuration, you need the prefix. -* If you don't configure a token endpoint (`tokenEndpointUri`) in the listener configuration, you don't need the prefix. -The Kafka broker interprets the password as a raw access token. - -If the `password` is set as the access token, the `username` must be set to the same principal name that the Kafka broker obtains from the access token. -You can specify username extraction options in your listener using the `userNameClaim`, `fallbackUserNameClaim`, `fallbackUsernamePrefix`, and `userInfoEndpointUri` properties. -The username extraction process also depends on your authorization server; in particular, how it maps client IDs to account names. - -[NOTE] -==== -OAuth over PLAIN does not support `password grant` mechanism. You can only 'proxy' through SASL PLAIN mechanism the `client credentials` (clientId + secret) or the access token as described above. 
-==== - -[role="_additional-resources"] -.Additional resources - -* xref:proc-oauth-authentication-broker-config-{context}[] diff --git a/documentation/modules/oauth/con-oauth-authorization-intro.adoc b/documentation/modules/oauth/con-oauth-authorization-intro.adoc deleted file mode 100644 index 3e2aeda2ae1..00000000000 --- a/documentation/modules/oauth/con-oauth-authorization-intro.adoc +++ /dev/null @@ -1,28 +0,0 @@ -// Module included in the following assemblies: -// -// assembly-oauth-authorization.adoc - -[id='con-oauth-authorization-intro_{context}'] -If you are using OAuth 2.0 with Keycloak for token-based authentication, -you can also use Keycloak to configure authorization rules to constrain client access to Kafka brokers. -Authentication establishes the identity of a user. -Authorization decides the level of access for that user. - -Strimzi supports the use of OAuth 2.0 token-based authorization through Keycloak {keycloak-authorization-services}, -which allows you to manage security policies and permissions centrally. - -Security policies and permissions defined in Keycloak are used to grant access to resources on Kafka brokers. -Users and clients are matched against policies that permit access to perform specific actions on Kafka brokers. - -Kafka allows all users full access to brokers by default, -and also provides the `AclAuthorizer` and `StandardAuthorizer` plugins to configure authorization based on Access Control Lists (ACLs). -The ACL rules managed by these plugins are used to grant or deny access to resources based on the _username_, and these rules are stored within the Kafka cluster itself. -However, OAuth 2.0 token-based authorization with Keycloak offers far greater flexibility on how you wish to implement access control to Kafka brokers. -In addition, you can configure your Kafka brokers to use OAuth 2.0 authorization and ACLs. 
- -[role="_additional-resources"] -.Additional resources - -* xref:assembly-oauth-authentication_str[Using OAuth 2.0 token-based authentication] -* xref:con-securing-kafka-authorization-{context}[Kafka Authorization] -* {keycloak-server-doc} diff --git a/documentation/modules/oauth/con-oauth-authorization-mechanism.adoc b/documentation/modules/oauth/con-oauth-authorization-mechanism.adoc deleted file mode 100644 index 935c96f09e1..00000000000 --- a/documentation/modules/oauth/con-oauth-authorization-mechanism.adoc +++ /dev/null @@ -1,20 +0,0 @@ -// Module included in the following assemblies: -// -// assembly-oauth-authorization.adoc - -[id='con-oauth-authorization-mechanism_{context}'] -= OAuth 2.0 authorization mechanism - -OAuth 2.0 authorization in Strimzi uses Keycloak server Authorization Services REST endpoints to extend token-based authentication with Keycloak by applying defined security policies on a particular user, -and providing a list of permissions granted on different resources for that user. -Policies use roles and groups to match permissions to users. -OAuth 2.0 authorization enforces permissions locally based on the received list of grants for the user from Keycloak Authorization Services. - -== Kafka broker custom authorizer - -A Keycloak _authorizer_ (`KeycloakAuthorizer`) is provided with Strimzi. -To be able to use the Keycloak REST endpoints for Authorization Services provided by Keycloak, -you configure a custom authorizer on the Kafka broker. - -The authorizer fetches a list of granted permissions from the authorization server as needed, -and enforces authorization locally on the Kafka Broker, making rapid authorization decisions for each client request. 
diff --git a/documentation/modules/oauth/con-oauth-config.adoc b/documentation/modules/oauth/con-oauth-config.adoc index 59fb659b94b..831ea71206c 100644 --- a/documentation/modules/oauth/con-oauth-config.adoc +++ b/documentation/modules/oauth/con-oauth-config.adoc @@ -3,18 +3,12 @@ // assembly-oauth-authentication.adoc [id='con-oauth-strimzi-config-{context}'] -= Configuring OAuth 2.0 authentication += Example: Enabling OAuth 2.0 authentication -OAuth 2.0 is used for interaction between Kafka clients and Strimzi components. +[role="_abstract"] +This example shows how to configure client access to a Kafka cluster using OAuth 2.0 authentication. +The procedures describe the configuration required to set up OAuth 2.0 authentication on Kafka listeners, Kafka Java clients, and Kafka components. -In order to use OAuth 2.0 for Strimzi, you must: - -. xref:proc-oauth-server-config-{context}[Configure an OAuth 2.0 authorization server for the Strimzi cluster and Kafka clients] -. xref:proc-oauth-authentication-broker-config-{context}[Deploy or update the Kafka cluster with Kafka broker listeners configured to use OAuth 2.0] -. xref:proc-oauth-client-config-{context}[Update your Java-based Kafka clients to use OAuth 2.0] -. 
xref:proc-oauth-kafka-config-{context}[Update Kafka component clients to use OAuth 2.0] - -include::proc-oauth-server-config.adoc[leveloffset=+1] -include::proc-oauth-authentication-broker-config.adoc[leveloffset=+1] -include::proc-oauth-client-config.adoc[leveloffset=+1] -include::proc-oauth-kafka-config.adoc[leveloffset=+1] +include::../../modules/oauth/proc-oauth-authentication-broker-config.adoc[leveloffset=+1] +include::../../modules/oauth/proc-oauth-client-config.adoc[leveloffset=+1] +include::../../modules/oauth/proc-oauth-kafka-config.adoc[leveloffset=+1] diff --git a/documentation/modules/oauth/con-oauth-reauthentication.adoc b/documentation/modules/oauth/con-oauth-reauthentication.adoc index e5d488baddc..d78a10eee4c 100644 --- a/documentation/modules/oauth/con-oauth-reauthentication.adoc +++ b/documentation/modules/oauth/con-oauth-reauthentication.adoc @@ -3,16 +3,16 @@ // assembly-oauth-authentication.adoc [id='{context}'] -= Session re-authentication for Kafka brokers += Re-authenticating sessions -You can configure `oauth` listeners to use Kafka _session re-authentication_ for OAuth 2.0 sessions between Kafka clients and Kafka brokers. +Configure `oauth` listeners to use Kafka _session re-authentication_ for OAuth 2.0 sessions between Kafka clients and Kafka. This mechanism enforces the expiry of an authenticated session between the client and the broker after a defined period of time. When a session expires, the client immediately starts a new session by reusing the existing connection rather than dropping it. Session re-authentication is disabled by default. To enable it, you set a time value for `maxSecondsWithoutReauthentication` in the `oauth` listener configuration. -The same property is used to configure session re-authentication for OAUTHBEARER and PLAIN authentication. -For an example configuration, see xref:proc-oauth-authentication-broker-config-{context}[]. 
+The same property is used to configure session re-authentication for `OAUTHBEARER` and `PLAIN` authentication. +For an example configuration, see xref:con-oauth-authentication-broker-{context}[]. Session re-authentication must be supported by the Kafka client libraries used by the client. @@ -30,31 +30,23 @@ Session re-authentication also applies to refresh tokens, if used. When the session expires, the client refreshes the access token by using its refresh token. The client then uses the new access token to re-authenticate to the existing session. -.Session expiry for OAUTHBEARER and PLAIN +.Session expiry -When session re-authentication is configured, session expiry works differently for OAUTHBEARER and PLAIN authentication. +When session re-authentication is configured, session expiry works differently for `OAUTHBEARER` and `PLAIN` authentication. -For OAUTHBEARER and PLAIN, using the client ID and secret method: +For `OAUTHBEARER` and `PLAIN`, using the client ID and secret method: * The broker's authenticated session will expire at the configured `maxSecondsWithoutReauthentication`. * The session will expire earlier if the access token expires before the configured time. -For PLAIN using the long-lived access token method: +For `PLAIN` using the long-lived access token method: * The broker's authenticated session will expire at the configured `maxSecondsWithoutReauthentication`. * Re-authentication will fail if the access token expires before the configured time. -Although session re-authentication is attempted, PLAIN has no mechanism for refreshing tokens. +Although session re-authentication is attempted, `PLAIN` has no mechanism for refreshing tokens. -If `maxSecondsWithoutReauthentication` is _not_ configured, OAUTHBEARER and PLAIN clients can remain connected to brokers indefinitely, without needing to re-authenticate. 
+If `maxSecondsWithoutReauthentication` is _not_ configured, `OAUTHBEARER` and `PLAIN` clients can remain connected to brokers indefinitely, without needing to re-authenticate. Authenticated sessions do not end with access token expiry. -However, this can be considered when configuring authorization, for example, by using `keycloak` authorization or installing a custom authorizer. - -[role="_additional-resources"] -.Additional resources - -* xref:con-oauth-authentication-broker-{context}[] -* xref:proc-oauth-authentication-broker-config-{context}[] -* link:{BookURLConfiguring}#type-KafkaListenerAuthenticationOAuth-reference[`KafkaListenerAuthenticationOAuth` schema reference^] -* link:https://cwiki.apache.org/confluence/display/KAFKA/KIP-368%3A+Allow+SASL+Connections+to+Periodically+Re-Authenticate[KIP-368^] \ No newline at end of file +However, this can be considered when configuring authorization, for example, by using `keycloak` authorization or installing a custom authorizer. \ No newline at end of file diff --git a/documentation/modules/oauth/con-oauth-server-examples.adoc b/documentation/modules/oauth/con-oauth-server-examples.adoc deleted file mode 100644 index c120c83cc0f..00000000000 --- a/documentation/modules/oauth/con-oauth-server-examples.adoc +++ /dev/null @@ -1,16 +0,0 @@ -// Module included in the following assemblies: -// -// assembly-oauth-authentication.adoc - -[id='con-oauth-server-examples-{context}'] -= Authorization server examples - -When choosing an authorization server, consider the features that best support configuration of your chosen authentication flow. - -For the purposes of testing OAuth 2.0 with Strimzi, Keycloak and ORY Hydra were implemented as the OAuth 2.0 authorization server. 
- -For more information, see: - -* {oauth-blog} -* {oauth-demo-keycloak} -* {oauth-demo-hydra} diff --git a/documentation/modules/oauth/proc-oauth-authentication-broker-config.adoc b/documentation/modules/oauth/proc-oauth-authentication-broker-config.adoc index eadd2ac660b..d26e6043198 100644 --- a/documentation/modules/oauth/proc-oauth-authentication-broker-config.adoc +++ b/documentation/modules/oauth/proc-oauth-authentication-broker-config.adoc @@ -3,27 +3,18 @@ // con-oauth-config.adoc [id='proc-oauth-authentication-broker-config-{context}'] -= Configuring OAuth 2.0 support for Kafka brokers += Setting up OAuth 2.0 authentication on listeners -This procedure describes how to configure Kafka brokers so that the broker listeners are enabled to use OAuth 2.0 authentication using an authorization server. +[role="_abstract"] +Configure Kafka listeners so that they are enabled to use OAuth 2.0 authentication using an authorization server. -We advise use of OAuth 2.0 over an encrypted interface through through a listener with `tls: true`. +We advise using OAuth 2.0 over an encrypted interface through a listener with `tls: true`. Plain listeners are not recommended. If the authorization server is using certificates signed by the trusted CA and matching the OAuth 2.0 server hostname, TLS connection works using the default settings. Otherwise, you may need to configure the truststore with proper certificates or disable the certificate hostname validation. 
-When configuring the Kafka broker you have two options for the mechanism used to validate the access token during OAuth 2.0 authentication of the newly connected Kafka client: - -* xref:example-1[Configuring fast local JWT token validation] -* xref:example-2[Configuring token validation using an introspection endpoint] - -.Before you start - -For more information on the configuration of OAuth 2.0 authentication for Kafka broker listeners, see: - -* link:{BookURLConfiguring}#type-KafkaListenerAuthenticationOAuth-reference[`KafkaListenerAuthenticationOAuth` schema reference^] -* xref:con-oauth-authentication-flow-str[OAuth 2.0 authentication mechanisms] +For more information on the configuration of OAuth 2.0 authentication for Kafka broker listeners, see the link:{BookURLConfiguring}#type-KafkaListenerAuthenticationOAuth-reference[`KafkaListenerAuthenticationOAuth` schema reference^]. .Prerequisites @@ -32,134 +23,39 @@ For more information on the configuration of OAuth 2.0 authentication for Kafka .Procedure -. Update the Kafka broker configuration (`Kafka.spec.kafka`) of your `Kafka` resource in an editor. +. Specify a listener in the `Kafka` resource with an `oauth` authentication type. + -[source,shell] +.Example listener configuration with OAuth 2.0 authentication +[source,yaml,subs="+attributes"] ---- -kubectl edit kafka my-cluster +apiVersion: {KafkaApiVersion} +kind: Kafka +spec: + kafka: + # ... + listeners: + - name: tls + port: 9093 + type: internal + tls: true + authentication: + type: oauth + - name: external3 + port: 9094 + type: loadbalancer + tls: true + authentication: + type: oauth + #... ---- -. Configure the Kafka broker `listeners` configuration. +. Configure the OAuth listener depending on the authorization server and validation type: + -The configuration for each type of listener does not have to be the same, as they are independent. -+ -The examples here show the configuration options as configured for external listeners. 
-+ --- -[[example-1]] -.Example 1: Configuring fast local JWT token validation -[source,yaml,subs="+quotes,attributes"] ----- -#... -- name: external3 - port: 9094 - type: loadbalancer - tls: true - authentication: - type: oauth # <1> - validIssuerUri: https:///auth/realms/external # <2> - jwksEndpointUri: https:///auth/realms/external/protocol/openid-connect/certs # <3> - userNameClaim: preferred_username # <4> - maxSecondsWithoutReauthentication: 3600 # <5> - tlsTrustedCertificates: # <6> - - secretName: oauth-server-cert - certificate: ca.crt - disableTlsHostnameVerification: true # <7> - jwksExpirySeconds: 360 # <8> - jwksRefreshSeconds: 300 # <9> - jwksMinRefreshPauseSeconds: 1 # <10> ----- -<1> Listener type set to `oauth`. -<2> URI of the token issuer used for authentication. -<3> URI of the JWKS certificate endpoint used for local JWT validation. -<4> The token claim (or key) that contains the actual username used to identify the user. Its value depends on the authorization server. If necessary, a JsonPath expression like `"['user.info'].['user.id']"` can be used to retrieve the username from nested JSON attributes within a token. -<5> (Optional) Activates the Kafka re-authentication mechanism that enforces session expiry to the same length of time as the access token. If the specified value is less than the time left for the access token to expire, then the client will have to re-authenticate before the actual token expiry. By default, the session does not expire when the access token expires, and the client does not attempt re-authentication. -<6> (Optional) Trusted certificates for TLS connection to the authorization server. -<7> (Optional) Disable TLS hostname verification. Default is `false`. -<8> The duration the JWKS certificates are considered valid before they expire. Default is `360` seconds. If you specify a longer time, consider the risk of allowing access to revoked certificates. -<9> The period between refreshes of JWKS certificates. 
The interval must be at least 60 seconds shorter than the expiry interval. Default is `300` seconds. -<10> The minimum pause in seconds between consecutive attempts to refresh JWKS public keys. When an unknown signing key is encountered, the JWKS keys refresh is scheduled outside the regular periodic schedule with at least the specified pause since the last refresh attempt. The refreshing of keys follows the rule of exponential backoff, retrying on unsuccessful refreshes with ever increasing pause, until it reaches `jwksRefreshSeconds`. The default value is 1. --- -+ --- -[[example-2]] -.Example 2: Configuring token validation using an introspection endpoint -[source,yaml,subs="+quotes,attributes"] ----- -- name: external3 - port: 9094 - type: loadbalancer - tls: true - authentication: - type: oauth - validIssuerUri: https:///auth/realms/external - introspectionEndpointUri: https:///auth/realms/external/protocol/openid-connect/token/introspect # <1> - clientId: kafka-broker # <2> - clientSecret: # <3> - secretName: my-cluster-oauth - key: clientSecret - userNameClaim: preferred_username # <4> - maxSecondsWithoutReauthentication: 3600 # <5> ----- -<1> URI of the token introspection endpoint. -<2> Client ID to identify the client. -<3> Client Secret and client ID is used for authentication. -<4> The token claim (or key) that contains the actual username used to identify the user. Its value depends on the authorization server. If necessary, a JsonPath expression like `"['user.info'].['user.id']"` can be used to retrieve the username from nested JSON attributes within a token. -<5> (Optional) Activates the Kafka re-authentication mechanism that enforces session expiry to the same length of time as the access token. If the specified value is less than the time left for the access token to expire, then the client will have to re-authenticate before the actual token expiry. 
By default, the session does not expire when the access token expires, and the client does not attempt re-authentication. --- -+ -Depending on how you apply OAuth 2.0 authentication, and the type of authorization server, there are additional (optional) configuration settings you can use: -+ --- -[source,yaml,subs="+quotes,attributes"] ----- - # ... - authentication: - type: oauth - # ... - checkIssuer: false # <1> - checkAudience: true # <2> - fallbackUserNameClaim: client_id # <3> - fallbackUserNamePrefix: client-account- # <4> - validTokenType: bearer # <5> - userInfoEndpointUri: https:///auth/realms/external/protocol/openid-connect/userinfo # <6> - enableOauthBearer: false # <7> - enablePlain: true # <8> - tokenEndpointUri: https:///auth/realms/external/protocol/openid-connect/token # <9> - customClaimCheck: "@.custom == 'custom-value'" # <10> - clientAudience: audience # <11> - clientScope: scope # <12> - connectTimeoutSeconds: 60 # <13> - readTimeoutSeconds: 60 # <14> - httpRetries: 2 # <15> - httpRetryPauseMs: 300 # <16> - groupsClaim: "$.groups" # <17> - groupsClaimDelimiter: "," # <18> - includeAcceptHeader: false # <19> ----- -<1> If your authorization server does not provide an `iss` claim, it is not possible to perform an issuer check. In this situation, set `checkIssuer` to `false` and do not specify a `validIssuerUri`. Default is `true`. -<2> If your authorization server provides an `aud` (audience) claim, and you want to enforce an audience check, set `checkAudience` to `true`. Audience checks identify the intended recipients of tokens. As a result, the Kafka broker will reject tokens that do not have its `clientId` in their `aud` claim. Default is `false`. -<3> An authorization server may not provide a single attribute to identify both regular users and clients. When a client authenticates in its own name, the server might provide a _client ID_. 
When a user authenticates using a username and password to obtain a refresh token or an access token, the server might provide a _username_ attribute in addition to a client ID. Use this fallback option to specify the username claim (attribute) to use if a primary user ID attribute is not available. If necessary, a JsonPath expression like `"['client.info'].['client.id']"` can be used to retrieve the fallback username to retrieve the username from nested JSON attributes within a token. -<4> In situations where `fallbackUserNameClaim` is applicable, it may also be necessary to prevent name collisions between the values of the username claim, and those of the fallback username claim. Consider a situation where a client called `producer` exists, but also a regular user called `producer` exists. In order to differentiate between the two, you can use this property to add a prefix to the user ID of the client. -<5> (Only applicable when using `introspectionEndpointUri`) Depending on the authorization server you are using, the introspection endpoint may or may not return the _token type_ attribute, or it may contain different values. You can specify a valid token type value that the response from the introspection endpoint has to contain. -<6> (Only applicable when using `introspectionEndpointUri`) The authorization server may be configured or implemented in such a way to not provide any identifiable information in an Introspection Endpoint response. In order to obtain the user ID, you can configure the URI of the `userinfo` endpoint as a fallback. The `userNameClaim`, `fallbackUserNameClaim`, and `fallbackUserNamePrefix` settings are applied to the response of `userinfo` endpoint. -<7> Set this to `false` to disable the OAUTHBEARER mechanism on the listener. At least one of PLAIN or OAUTHBEARER has to be enabled. Default is `true`. -<8> Set to `true` to enable PLAIN authentication on the listener, which is supported for clients on all platforms. 
-<9> Additional configuration for the PLAIN mechanism. If specified, clients can authenticate over PLAIN by passing an access token as the `password` using an `$accessToken:` prefix. -For production, always use `https://` urls. -<10> Additional custom rules can be imposed on the JWT access token during validation by setting this to a JsonPath filter query. If the access token does not contain the necessary data, it is rejected. When using the `introspectionEndpointUri`, the custom check is applied to the introspection endpoint response JSON. -<11> An `audience` parameter passed to the token endpoint. An _audience_ is used when obtaining an access token for inter-broker authentication. It is also used in the name of a client for OAuth 2.0 over PLAIN client authentication using a `clientId` and `secret`. This only affects the ability to obtain the token, and the content of the token, depending on the authorization server. It does not affect token validation rules by the listener. -<12> A `scope` parameter passed to the token endpoint. A _scope_ is used when obtaining an access token for inter-broker authentication. It is also used in the name of a client for OAuth 2.0 over PLAIN client authentication using a `clientId` and `secret`. This only affects the ability to obtain the token, and the content of the token, depending on the authorization server. It does not affect token validation rules by the listener. -<13> The connect timeout in seconds when connecting to the authorization server. The default value is 60. -<14> The read timeout in seconds when connecting to the authorization server. The default value is 60. -<15> The maximum number of times to retry a failed HTTP request to the authorization server. The default value is `0`, meaning that no retries are performed. To use this option effectively, consider reducing the timeout times for the `connectTimeoutSeconds` and `readTimeoutSeconds` options. 
However, note that retries may prevent the current worker thread from being available to other requests, and if too many requests stall, it could make the Kafka broker unresponsive. -<16> The time to wait before attempting another retry of a failed HTTP request to the authorization server. By default, this time is set to zero, meaning that no pause is applied. This is because many issues that cause failed requests are per-request network glitches or proxy issues that can be resolved quickly. However, if your authorization server is under stress or experiencing high traffic, you may want to set this option to a value of 100 ms or more to reduce the load on the server and increase the likelihood of successful retries. -<17> A JsonPath query that is used to extract groups information from either the JWT token or the introspection endpoint response. This option is not set by default. By configuring this option, a custom authorizer can make authorization decisions based on user groups. -<18> A delimiter used to parse groups information when it is returned as a single delimited string. The default value is ',' (comma). -<19> Some authorization servers have issues with client sending `Accept: application/json` header. By setting `includeAcceptHeader: false` the header will not be sent. Default is `true`. --- +* xref:con-oauth-authentication-broker-jwt-{context}[] +* xref:con-oauth-authentication-broker-intro-{context}[] +* xref:con-oauth-authentication-broker-additional-{context}[] -. Save and exit the editor, then wait for rolling updates to complete. +. Apply the changes to the `Kafka` configuration. . Check the update in the logs or by watching the pod state transitions: + @@ -173,4 +69,4 @@ The rolling update configures the brokers to use OAuth 2.0 authentication. 
.What to do next -* xref:proc-oauth-client-config-{context}[Configure your Kafka clients to use OAuth 2.0] +* xref:proc-oauth-client-config-{context}[Configure your Kafka clients to use OAuth 2.0] \ No newline at end of file diff --git a/documentation/modules/oauth/proc-oauth-authorization-broker-config.adoc b/documentation/modules/oauth/proc-oauth-authorization-broker-config.adoc index 7d71d7be37a..73b47adc324 100644 --- a/documentation/modules/oauth/proc-oauth-authorization-broker-config.adoc +++ b/documentation/modules/oauth/proc-oauth-authorization-broker-config.adoc @@ -3,9 +3,20 @@ // con-oauth-config.adoc [id='proc-oauth-authorization-broker-config-{context}'] -= Configuring OAuth 2.0 authorization support += Example: Enabling OAuth 2.0 authorization -This procedure describes how to configure Kafka brokers to use OAuth 2.0 authorization using Keycloak Authorization Services. +[role="_abstract"] +This example procedure shows how to configure Kafka to use OAuth 2.0 authorization using Keycloak Authorization Services. +To enable OAuth 2.0 authorization using Keycloak, configure the `Kafka` resource to use `keycloak` authorization and specify the properties required to access the authorization server and Keycloak Authorization Services. + +Keycloak server Authorization Services REST endpoints extend token-based authentication with Keycloak by applying defined security policies on a particular user, +and providing a list of permissions granted on different resources for that user. +Policies use roles and groups to match permissions to users. +OAuth 2.0 authorization enforces permissions locally based on the received list of grants for the user from Keycloak Authorization Services. + +A Keycloak _authorizer_ (`KeycloakAuthorizer`) is provided with Strimzi. +The authorizer fetches a list of granted permissions from the authorization server as needed, +and enforces authorization locally on Kafka, making rapid authorization decisions for each client request. 
.Before you begin Consider the access you require or want to limit for certain users. @@ -17,7 +28,7 @@ And roles are used to match users based on their function. With Keycloak, you can store users and groups in LDAP, whereas clients and roles cannot be stored this way. Storage and access to user data may be a factor in how you choose to configure authorization policies. -NOTE: link:{BookURLConfiguring}#property-simple-authorization-superusers-reference[Super users^] always have unconstrained access to a Kafka broker regardless of the authorization implemented on the Kafka broker. +NOTE: link:{BookURLConfiguring}#property-simple-authorization-superusers-reference[Super users^] always have unconstrained access to Kafka regardless of the authorization implemented. .Prerequisites @@ -27,20 +38,12 @@ You use the same Keycloak server endpoint when you set up authorization. .Procedure -. Access the Keycloak Admin Console or use the Keycloak Admin CLI to enable Authorization Services for the Kafka broker client you created when setting up OAuth 2.0 authentication. +. Access the Keycloak Admin Console or use the Keycloak Admin CLI to enable Authorization Services for the OAuth 2.0 client for Kafka you created when setting up OAuth 2.0 authentication. . Use Authorization Services to define resources, authorization scopes, policies, and permissions for the client. . Bind the permissions to users and clients by assigning them roles and groups. -. Configure the Kafka brokers to use Keycloak authorization by updating the Kafka broker configuration (`Kafka.spec.kafka`) of your `Kafka` resource in an editor. -+ -[source,shell] ----- -kubectl edit kafka my-cluster ----- - -. Configure the Kafka broker `kafka` configuration to use `keycloak` authorization, and to be able to access the authorization server and Authorization Services. -+ -For example: +. 
Configure the `Kafka` resource to use `keycloak` authorization, and to be able to access the authorization server and Authorization Services. + +.Example OAuth 2.0 authorization configuration [source,yaml,subs="+quotes,attributes"] ---- apiVersion: {KafkaApiVersion} @@ -92,11 +95,11 @@ Default is `false`. <12> (Optional) Controls whether the latest grants are fetched for a new session. When enabled, grants are retrieved from Keycloak and cached for the user. The default value is `false`. <13> (Optional) The connect timeout in seconds when connecting to the Keycloak token endpoint. The default value is 60. <14> (Optional) The read timeout in seconds when connecting to the Keycloak token endpoint. The default value is 60. -<15> (Optional) The maximum number of times to retry (without pausing) a failed HTTP request to the authorization server. The default value is `0`, meaning that no retries are performed. To use this option effectively, consider reducing the timeout times for the `connectTimeoutSeconds` and `readTimeoutSeconds` options. However, note that retries may prevent the current worker thread from being available to other requests, and if too many requests stall, it could make the Kafka broker unresponsive. +<15> (Optional) The maximum number of times to retry (without pausing) a failed HTTP request to the authorization server. The default value is `0`, meaning that no retries are performed. To use this option effectively, consider reducing the timeout times for the `connectTimeoutSeconds` and `readTimeoutSeconds` options. However, note that retries may prevent the current worker thread from being available to other requests, and if too many requests stall, it could make Kafka unresponsive. <16> (Optional) Enable or disable OAuth metrics. The default value is `false`. <17> (Optional) Some authorization servers have issues with client sending `Accept: application/json` header. By setting `includeAcceptHeader: false` the header will not be sent. 
Default is `true`. -. Save and exit the editor, then wait for rolling updates to complete. +. Apply the changes to the `Kafka` configuration. . Check the update in the logs or by watching the pod state transitions: + @@ -108,4 +111,4 @@ kubectl get pod -w + The rolling update configures the brokers to use OAuth 2.0 authorization. -. Verify the configured permissions by accessing Kafka brokers as clients or users with specific roles, making sure they have the necessary access, or do not have the access they are not supposed to have. +. Verify the configured permissions by accessing Kafka brokers as clients or users with specific roles, ensuring they have the necessary access and do not have unauthorized access. diff --git a/documentation/modules/oauth/proc-oauth-authorization-keycloak-example.adoc b/documentation/modules/oauth/proc-oauth-authorization-keycloak-example.adoc index 07226856647..97028d892e7 100644 --- a/documentation/modules/oauth/proc-oauth-authorization-keycloak-example.adoc +++ b/documentation/modules/oauth/proc-oauth-authorization-keycloak-example.adoc @@ -3,11 +3,13 @@ // assembly-oauth-authorization.adoc [id='proc-oauth-authorization-keycloak-example_{context}'] -= Trying Keycloak Authorization Services += Example: Setting up Keycloak Authorization Services [role="_abstract"] +If you are using OAuth 2.0 with Keycloak for token-based authentication, +you can also use Keycloak to configure authorization rules to constrain client access to Kafka brokers. This example explains how to use Keycloak Authorization Services with `keycloak` authorization. -Use Keycloak Authorization Services to enforce access restrictions on Kafka clients. +Set up Keycloak Authorization Services to enforce access restrictions on Kafka clients. Keycloak Authorization Services use authorization scopes, policies and permissions to define and apply access control to resources. 
Keycloak Authorization Services REST endpoints provide a list of granted permissions on resources for authenticated users. @@ -26,7 +28,7 @@ You can import the realm into a Keycloak instance to set up fine-grained permiss If you want to try the example with Keycloak, use these files to perform the tasks outlined in this section in the order shown. -. xref:proc-oauth-authorization-keycloak-example-setup_{context}[Accessing the Keycloak Admin Console] +. xref:proc-oauth-authorization-keycloak-example-setup_{context}[Setting up permissions in Keycloak] . xref:proc-oauth-authorization-keycloak-example-deploy-kafka_{context}[Deploying a Kafka cluster with Keycloak authorization] . xref:proc-oauth-authorization-keycloak-example-authentication_{context}[Preparing TLS connectivity for a CLI Kafka client session] . xref:proc-oauth-authorization-keycloak-example-check_{context}[Checking authorized access to Kafka using a CLI Kafka client session] @@ -49,7 +51,7 @@ It makes sense to require the user to log in before using the resulting access t In this case, the access token represents the specific user, rather than the client application. [id='proc-oauth-authorization-keycloak-example-setup_{context}'] -== Accessing the Keycloak Admin Console +== Setting up permissions in Keycloak Set up Keycloak, then connect to its Admin Console and add the preconfigured realm. Use the example `kafka-authz-realm.json` file to import the realm. @@ -354,7 +356,7 @@ sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthL EOF ---- + -The SASL OAUTHBEARER mechanism is used. +The SASL `OAUTHBEARER` mechanism is used. This mechanism requires a client ID and client secret, which means the client first connects to the Keycloak server to obtain an access token. The client then connects to the Kafka broker and uses the access token to authenticate. 
@@ -674,7 +676,3 @@ bin/kafka-configs.sh --bootstrap-server my-cluster-kafka-bootstrap:9093 --comman + The cluster configuration for this example is empty. -[role="_additional-resources"] -.Additional resources -* {keycloak-server-install-doc} -* xref:con-mapping-keycloak-authz-services-to-kafka-model_authz-model[Map Keycloak Authorization Services to the Kafka authorization model] diff --git a/documentation/modules/oauth/proc-oauth-client-config.adoc b/documentation/modules/oauth/proc-oauth-client-config.adoc index fe4dc08b9a2..29030dee77f 100644 --- a/documentation/modules/oauth/proc-oauth-client-config.adoc +++ b/documentation/modules/oauth/proc-oauth-client-config.adoc @@ -3,55 +3,15 @@ // con-oauth-config.adoc [id='proc-oauth-client-config-{context}'] -= Configuring Kafka Java clients to use OAuth 2.0 += Setting up OAuth 2.0 on Kafka Java clients [role="_abstract"] Configure Kafka producer and consumer APIs to use OAuth 2.0 for interaction with Kafka brokers. Add a callback plugin to your client `pom.xml` file, then configure your client for OAuth 2.0. -Specify the following in your client configuration: - -* A SASL (Simple Authentication and Security Layer) security protocol: -** `SASL_SSL` for authentication over TLS encrypted connections -** `SASL_PLAINTEXT` for authentication over unencrypted connections -+ -Use `SASL_SSL` for production and `SASL_PLAINTEXT` for local development only. -When using `SASL_SSL`, additional `ssl.truststore` configuration is needed. -The truststore configuration is required for secure connection (`https://`) to the OAuth 2.0 authorization server. -To verify the OAuth 2.0 authorization server, add the CA certificate for the authorization server to the truststore in your client configuration. -You can configure a truststore in PEM or PKCS #12 format. 
- -* A Kafka SASL mechanism: -** `OAUTHBEARER` for credentials exchange using a bearer token -** `PLAIN` to pass client credentials (clientId + secret) or an access token - -* A JAAS (Java Authentication and Authorization Service) module that implements the SASL mechanism: -+ --- -** `org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule` implements the OAuthbearer mechanism -** `org.apache.kafka.common.security.plain.PlainLoginModule` implements the plain mechanism --- -+ -To be able to use the OAuthbearer mechanism, you must also add the custom `io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler` class as the callback handler. -`JaasClientOauthLoginCallbackHandler` handles OAuth callbacks to the authorization server for access tokens during client login. -This enables automatic token renewal, ensuring continuous authentication without user intervention. -Additionally, it handles login credentials for clients using the OAuth 2.0 password grant method. - -* SASL authentication properties, which support the following authentication methods: -+ --- -** OAuth 2.0 client credentials -** OAuth 2.0 password grant (deprecated) -** Access token -** Refresh token --- -+ -Add the SASL authentication properties as JAAS configuration (`sasl.jaas.config` and `sasl.login.callback.handler.class`). How you configure the authentication properties depends on the authentication method you are using to access the OAuth 2.0 authorization server. In this procedure, the properties are specified in a properties file, then loaded into the client configuration. -NOTE: You can also specify authentication properties as environment variables, or as Java system properties. For Java system properties, you can set them using `setProperty` and pass them on the command line using the `-D` option. - .Prerequisites * Strimzi and Kafka are running @@ -71,122 +31,16 @@ NOTE: You can also specify authentication properties as environment variables, o ---- -. 
Configure the client properties by specifying the following configuration in a properties file: -+ -* The security protocol -* The SASL mechanism -* The JAAS module and authentication properties according to the method being used -+ -For example, we can add the following to a `client.properties` file: +. Configure the client depending on the OAuth 2.0 authentication method: + -- -.Client credentials mechanism properties -[source,properties,subs="+quotes,attributes"] ----- -security.protocol=SASL_SSL # <1> -sasl.mechanism=OAUTHBEARER # <2> -ssl.truststore.location=/tmp/truststore.p12 <3> -ssl.truststore.password=$STOREPASS -ssl.truststore.type=PKCS12 -sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - oauth.token.endpoint.uri="" \ # <4> - oauth.client.id="" \ # <5> - oauth.client.secret="" \ # <6> - oauth.ssl.truststore.location="/tmp/oauth-truststore.p12" \ <7> - oauth.ssl.truststore.password="$STOREPASS" \ <8> - oauth.ssl.truststore.type="PKCS12" \ <9> - oauth.scope="" \ # <10> - oauth.audience="" ; # <11> -sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler ----- -<1> `SASL_SSL` security protocol for TLS-encrypted connections. Use `SASL_PLAINTEXT` over unencrypted connections for local development only. -<2> The SASL mechanism specified as `OAUTHBEARER` or `PLAIN`. -<3> The truststore configuration for secure access to the Kafka cluster. -<4> URI of the authorization server token endpoint. -<5> Client ID, which is the name used when creating the _client_ in the authorization server. -<6> Client secret created when creating the _client_ in the authorization server. -<7> The location contains the public key certificate (`truststore.p12`) for the authorization server. -<8> The password for accessing the truststore. -<9> The truststore type. -<10> (Optional) The `scope` for requesting the token from the token endpoint. 
-An authorization server may require a client to specify the scope. -<11> (Optional) The `audience` for requesting the token from the token endpoint. -An authorization server may require a client to specify the audience. --- -+ --- -.Password grants mechanism properties -[source,properties,subs="+quotes,attributes"] ----- -security.protocol=SASL_SSL -sasl.mechanism=OAUTHBEARER -ssl.truststore.location=/tmp/truststore.p12 -ssl.truststore.password=$STOREPASS -ssl.truststore.type=PKCS12 -sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - oauth.token.endpoint.uri="" \ - oauth.client.id="" \ # <1> - oauth.client.secret="" \ # <2> - oauth.password.grant.username="" \ # <3> - oauth.password.grant.password="" \ # <4> - oauth.ssl.truststore.location="/tmp/oauth-truststore.p12" \ - oauth.ssl.truststore.password="$STOREPASS" \ - oauth.ssl.truststore.type="PKCS12" \ - oauth.scope="" \ - oauth.audience="" ; -sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler ----- -<1> Client ID, which is the name used when creating the _client_ in the authorization server. -<2> (Optional) Client secret created when creating the _client_ in the authorization server. -<3> Username for password grant authentication. OAuth password grant configuration (username and password) uses the OAuth 2.0 password grant method. To use password grants, create a user account for a client on your authorization server with limited permissions. The account should act like a service account. Use in environments where user accounts are required for authentication, but consider using a refresh token first. -<4> Password for password grant authentication. -+ -NOTE: SASL PLAIN does not support passing a username and password (password grants) using the OAuth 2.0 password grant method. 
--- -+ --- -.Access token properties -[source,properties,subs="+quotes,attributes"] ----- -security.protocol=SASL_SSL -sasl.mechanism=OAUTHBEARER -ssl.truststore.location=/tmp/truststore.p12 -ssl.truststore.password=$STOREPASS -ssl.truststore.type=PKCS12 -sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - oauth.token.endpoint.uri="" \ - oauth.access.token="" \ # <1> - oauth.ssl.truststore.location="/tmp/oauth-truststore.p12" \ - oauth.ssl.truststore.password="$STOREPASS" \ - oauth.ssl.truststore.type="PKCS12" ; -sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler ----- -<1> Long-lived access token for Kafka clients. +* xref:con-oauth-authentication-client-credentials-{context}[] +* xref:con-oauth-authentication-password-grants-{context}[] +* xref:con-oauth-authentication-access-token-{context}[] +* xref:con-oauth-authentication-refresh-token-{context}[] -- + --- -.Refresh token properties -[source,properties,subs="+quotes,attributes"] ----- -security.protocol=SASL_SSL -sasl.mechanism=OAUTHBEARER -ssl.truststore.location=/tmp/truststore.p12 -ssl.truststore.password=$STOREPASS -ssl.truststore.type=PKCS12 -sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - oauth.token.endpoint.uri="" \ - oauth.client.id="" \ # <1> - oauth.client.secret="" \ # <2> - oauth.refresh.token="" \ # <3> - oauth.ssl.truststore.location="/tmp/oauth-truststore.p12" \ - oauth.ssl.truststore.password="$STOREPASS" \ - oauth.ssl.truststore.type="PKCS12" ; -sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler ----- -<1> Client ID, which is the name used when creating the _client_ in the authorization server. -<2> (Optional) Client secret created when creating the _client_ in the authorization server. -<3> Long-lived refresh token for Kafka clients. 
--- +For example, specify the properties for the authentication method in a `client.properties` file. . Input the client properties for OAUTH 2.0 authentication into the Java client code. + diff --git a/documentation/modules/oauth/proc-oauth-kafka-config.adoc b/documentation/modules/oauth/proc-oauth-kafka-config.adoc index c0c10e74bf2..06653509293 100644 --- a/documentation/modules/oauth/proc-oauth-kafka-config.adoc +++ b/documentation/modules/oauth/proc-oauth-kafka-config.adoc @@ -3,11 +3,12 @@ // con-oauth-config.adoc [id='proc-oauth-kafka-config-{context}'] -= Configuring OAuth 2.0 for Kafka components += Setting up OAuth 2.0 on Kafka components -This procedure describes how to configure Kafka components to use OAuth 2.0 authentication using an authorization server. +[role="_abstract"] +This procedure describes how to set up Kafka components to use OAuth 2.0 authentication using an authorization server. -You can configure authentication for: +You can configure OAuth 2.0 authentication for the following components: * Kafka Connect * Kafka MirrorMaker @@ -40,7 +41,7 @@ metadata: name: my-bridge-oauth type: Opaque data: - clientSecret: MGQ1OTRmMzYtZTllZS00MDY2LWI5OGEtMTM5MzM2NjdlZjQw <1> + clientSecret: MGQ1OTRmMzYtZTllZS00MDY2LWI5OGEtMTM5MzM2NjdlZjQw # <1> ---- <1> The `clientSecret` key must be in base64 format. @@ -59,6 +60,7 @@ For OAuth 2.0 authentication, you can use the following options: For example, here OAuth 2.0 is assigned to the Kafka Bridge client using a client ID and secret, and TLS: + -- +.Example OAuth 2.0 authentication configuration [source,yaml,subs="+quotes,attributes"] ---- apiVersion: {KafkaApiVersion} @@ -68,13 +70,13 @@ metadata: spec: # ... 
authentication: - type: oauth <1> - tokenEndpointUri: https:///auth/realms/master/protocol/openid-connect/token <2> + type: oauth # <1> + tokenEndpointUri: https:///auth/realms/master/protocol/openid-connect/token # <2> clientId: kafka-bridge clientSecret: secretName: my-bridge-oauth key: clientSecret - tlsTrustedCertificates: <3> + tlsTrustedCertificates: # <3> - secretName: oauth-server-cert certificate: tls.crt ---- @@ -86,6 +88,7 @@ spec: Depending on how you apply OAuth 2.0 authentication, and the type of authorization server, there are additional configuration options you can use: + -- +.Additional configuration options [source,yaml,subs="+quotes,attributes"] ---- # ... @@ -93,16 +96,16 @@ spec: # ... authentication: # ... - disableTlsHostnameVerification: true <1> - checkAccessTokenType: false <2> - accessTokenIsJwt: false <3> - scope: any <4> - audience: kafka <5> - connectTimeoutSeconds: 60 <6> - readTimeoutSeconds: 60 <7> - httpRetries: 2 <8> - httpRetryPauseMs: 300 <9> - includeAcceptHeader: false <10> + disableTlsHostnameVerification: true # <1> + checkAccessTokenType: false # <2> + accessTokenIsJwt: false # <3> + scope: any # <4> + audience: kafka # <5> + connectTimeoutSeconds: 60 # <6> + readTimeoutSeconds: 60 # <7> + httpRetries: 2 # <8> + httpRetryPauseMs: 300 # <9> + includeAcceptHeader: false # <10> ---- <1> (Optional) Disable TLS hostname verification. Default is `false`. <2> If the authorization server does not return a `typ` (type) claim inside the JWT token, you can apply `checkAccessTokenType: false` to skip the token type check. Default is `true`. @@ -120,12 +123,7 @@ In this case it is `kafka`. <10> (Optional) Some authorization servers have issues with client sending `Accept: application/json` header. By setting `includeAcceptHeader: false` the header will not be sent. Default is `true`. -- -. Apply the changes to the deployment of your Kafka resource. -+ -[source,yaml,subs="+quotes,attributes"] ----- -kubectl apply -f your-file ----- +. 
Apply the changes to the resource configuration of the component. . Check the update in the logs or by watching the pod state transitions: + diff --git a/documentation/modules/oauth/proc-oauth-server-config.adoc b/documentation/modules/oauth/proc-oauth-server-config.adoc index 8a13a87cc04..8838fa42d69 100644 --- a/documentation/modules/oauth/proc-oauth-server-config.adoc +++ b/documentation/modules/oauth/proc-oauth-server-config.adoc @@ -1,28 +1,22 @@ -// Module included in the following module: +// This assembly is included in the following assemblies: // -// con-oauth-config.adoc +// assembly-oauth-security.adoc [id='proc-oauth-server-config-{context}'] = Configuring an OAuth 2.0 authorization server -This procedure describes in general what you need to do to configure an authorization server for integration with Strimzi. - -These instructions are not product specific. - +[role="_abstract"] +Before you can use OAuth 2.0 token-based access, you must configure an authorization server for integration with Strimzi. The steps are dependent on the chosen authorization server. Consult the product documentation for the authorization server for information on how to set up OAuth 2.0 access. -NOTE: If you already have an authorization server deployed, you can skip the deployment step and use your current deployment. - -.Procedure +Prepare the authorization server to work with Strimzi by defining _OAuth 2.0 clients_ for Kafka and each Kafka client component of your application. +In relation to the authorization server, the Kafka cluster and Kafka clients are both regarded as OAuth 2.0 clients. -. Deploy the authorization server to your cluster. -. Access the CLI or admin console for the authorization server to configure OAuth 2.0 for Strimzi. -+ -Now prepare the authorization server to work with Strimzi. +In general, configure OAuth 2.0 clients in the authorization server with the following client credentials enabled: -. Configure a `kafka-broker` client. -. 
Configure clients for each Kafka client component of your application. +* Client ID (for example, `kafka` for the Kafka cluster) +* Client ID and secret as the authentication mechanism -.What to do next -After deploying and configuring the authorization server, xref:proc-oauth-authentication-broker-config-{context}[configure the Kafka brokers to use OAuth 2.0]. +NOTE: You only need to use a client ID and secret when using a non-public introspection endpoint of the authorization server. +The credentials are not typically required when using public authorization server endpoints, as with fast local JWT token validation. diff --git a/documentation/modules/oauth/ref-example-permissions-for-kafka-operations.adoc b/documentation/modules/oauth/ref-example-permissions-for-kafka-operations.adoc index 489dca93102..efc9999d651 100644 --- a/documentation/modules/oauth/ref-example-permissions-for-kafka-operations.adoc +++ b/documentation/modules/oauth/ref-example-permissions-for-kafka-operations.adoc @@ -1,5 +1,5 @@ [id="ref-example-permissions-for-kafka-operations_{context}"] -= Example permissions required for Kafka operations += Permissions for common Kafka operations [role="_abstract"] diff --git a/documentation/shared/attributes.adoc b/documentation/shared/attributes.adoc index 497f8763bdb..907c3189640 100644 --- a/documentation/shared/attributes.adoc +++ b/documentation/shared/attributes.adoc @@ -57,7 +57,6 @@ :keycloak-server-doc: link:https://www.keycloak.org/documentation.html[Keycloak documentation^] :keycloak-server-install-doc: link:https://www.keycloak.org/operator/installation[Installing the Keycloak Operator^] :keycloak-authorization-services: link:https://www.keycloak.org/docs/latest/authorization_services/index.html[Keycloak Authorization Services^] -:oauth-blog: link:https://strimzi.io/2019/10/25/kafka-authentication-using-oauth-2.0.html[Kafka authentication using OAuth 2.0^] :OAuthVersion: 0.15.0 :oauth-demo-keycloak: 
link:https://github.com/strimzi/strimzi-kafka-oauth/tree/{OAuthVersion}/examples[Using Keycloak as the OAuth 2.0 authorization server^] :oauth-demo-hydra: link:https://github.com/strimzi/strimzi-kafka-oauth/tree/{OAuthVersion}/examples/docker#running-with-hydra-using-ssl-and-opaque-tokens[Using Hydra as the OAuth 2.0 authorization server^]