docs: update env vars according to CLI output (#448)
rkpattnaik780 authored Apr 20, 2022
1 parent e2d4b56 commit a090a51
Showing 7 changed files with 50 additions and 50 deletions.
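Across the seven files, the change is a consistent rename of the environment variables (and, in the Node.js guide, of the env file itself) so that the docs follow the CLI output, per the commit title. A summary of the mapping, derived from the diffs below:

----
BOOTSTRAP_SERVER, KAFKA_BOOTSTRAP_SERVER      ->  KAFKA_HOST
CLIENT_ID, KAFKA_CLIENT_ID, USER              ->  RHOAS_CLIENT_ID
CLIENT_SECRET, KAFKA_CLIENT_SECRET, PASSWORD  ->  RHOAS_CLIENT_SECRET
OAUTH_TOKEN_ENDPOINT_URI, OAUTH_SERVER_URL    ->  RHOAS_OAUTH_TOKEN_URL
OAUTH_REALM                                   ->  RHOAS_OAUTH_REALM
REGISTRY_URL                                  ->  SERVICE_REGISTRY_URL
.env (Node.js reactive example)               ->  rhoas.env
----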
@@ -21,14 +21,14 @@ quarkus.container-image.push=false
## ./mvnw quarkus:dev
## ./mvnw package -Dquarkus.profile=dev

-%dev.kafka.bootstrap.servers=${BOOTSTRAP_SERVER}
+%dev.kafka.bootstrap.servers=${KAFKA_HOST}
%dev.kafka.security.protocol=SASL_SSL

%dev.kafka.sasl.mechanism=OAUTHBEARER
%dev.kafka.sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \
oauth.client.id="${CLIENT_ID}" \
oauth.client.secret="${CLIENT_SECRET}" \
oauth.token.endpoint.uri="${OAUTH_TOKEN_ENDPOINT_URI}" ;
oauth.client.id="${RHOAS_CLIENT_ID}" \
oauth.client.secret="${RHOAS_CLIENT_SECRET}" \
oauth.token.endpoint.uri="${RHOAS_OAUTH_TOKEN_URL}" ;
%dev.kafka.sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler

## sbo-dev profile that can be used for local development when using
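With the renamed placeholders in the `%dev` profile above, the variables are exported in the shell before starting dev mode. A minimal sketch, using the same placeholder values (angle brackets) as the quarkus-kafka README later in this diff:

----
$ export KAFKA_HOST=<bootstrap_server>
$ export RHOAS_CLIENT_ID=<client_id>
$ export RHOAS_CLIENT_SECRET=<client_secret>
$ export RHOAS_OAUTH_TOKEN_URL=<oauth_token_endpoint_uri>
$ ./mvnw quarkus:dev
----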
@@ -4,23 +4,23 @@ mp.messaging.incoming.quotes.value.deserializer=io.apicurio.registry.serde.avro.
mp.messaging.incoming.quotes.apicurio.registry.use-specific-avro-reader=true
mp.messaging.incoming.quotes.apicurio.registry.avro-datum-provider=io.apicurio.registry.serde.avro.ReflectAvroDatumProvider

-%dev.mp.messaging.incoming.quotes.apicurio.auth.service.url=${OAUTH_SERVER_URL:https://identity.api.openshift.com/auth}
-%dev.mp.messaging.incoming.quotes.apicurio.auth.realm=${OAUTH_REALM:rhoas}
-%dev.mp.messaging.incoming.quotes.apicurio.auth.client.id=${CLIENT_ID}
-%dev.mp.messaging.incoming.quotes.apicurio.auth.client.secret=${CLIENT_SECRET}
+%dev.mp.messaging.incoming.quotes.apicurio.auth.service.url=${RHOAS_OAUTH_TOKEN_URL:https://identity.api.openshift.com/auth}
+%dev.mp.messaging.incoming.quotes.apicurio.auth.realm=${RHOAS_OAUTH_REALM:rhoas}
+%dev.mp.messaging.incoming.quotes.apicurio.auth.client.id=${RHOAS_CLIENT_ID}
+%dev.mp.messaging.incoming.quotes.apicurio.auth.client.secret=${RHOAS_CLIENT_SECRET}

-mp.messaging.connector.smallrye-kafka.apicurio.registry.url=${REGISTRY_URL}
+mp.messaging.connector.smallrye-kafka.apicurio.registry.url=${SERVICE_REGISTRY_URL}

%test.quarkus.apicurio-registry.devservices.port=8888

##Kafka servers and auth configuration

-%dev.kafka.bootstrap.servers=${BOOTSTRAP_SERVER}
+%dev.kafka.bootstrap.servers=${KAFKA_HOST}
%dev.kafka.security.protocol=SASL_SSL

%dev.kafka.sasl.mechanism=OAUTHBEARER
%dev.kafka.sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \
oauth.client.id="${CLIENT_ID}" \
oauth.client.secret="${CLIENT_SECRET}" \
oauth.token.endpoint.uri="${OAUTH_TOKEN_ENDPOINT_URI}" ;
oauth.client.id="${RHOAS_CLIENT_ID}" \
oauth.client.secret="${RHOAS_CLIENT_SECRET}" \
oauth.token.endpoint.uri="${RHOAS_OAUTH_TOKEN_URL}" ;
%dev.kafka.sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler
@@ -10,23 +10,23 @@ mp.messaging.outgoing.quotes.value.serializer=io.apicurio.registry.serde.avro.Av
mp.messaging.outgoing.quotes.key.serializer=org.apache.kafka.common.serialization.StringSerializer
mp.messaging.outgoing.quotes.merge=true

-%dev.mp.messaging.outgoing.quotes.apicurio.auth.realm=${OAUTH_REALM:rhoas}
-%dev.mp.messaging.outgoing.quotes.apicurio.auth.service.url=${OAUTH_SERVER_URL:https://identity.api.openshift.com/auth}
-%dev.mp.messaging.outgoing.quotes.apicurio.auth.client.id=${CLIENT_ID}
-%dev.mp.messaging.outgoing.quotes.apicurio.auth.client.secret=${CLIENT_SECRET}
+%dev.mp.messaging.outgoing.quotes.apicurio.auth.realm=${RHOAS_OAUTH_REALM:rhoas}
+%dev.mp.messaging.outgoing.quotes.apicurio.auth.service.url=${RHOAS_OAUTH_TOKEN_URL:https://identity.api.openshift.com/auth}
+%dev.mp.messaging.outgoing.quotes.apicurio.auth.client.id=${RHOAS_CLIENT_ID}
+%dev.mp.messaging.outgoing.quotes.apicurio.auth.client.secret=${RHOAS_CLIENT_SECRET}

-mp.messaging.connector.smallrye-kafka.apicurio.registry.url=${REGISTRY_URL}
+mp.messaging.connector.smallrye-kafka.apicurio.registry.url=${SERVICE_REGISTRY_URL}
%test.quarkus.apicurio-registry.devservices.port=8888


##Kafka servers and auth configuration

-%dev.kafka.bootstrap.servers=${BOOTSTRAP_SERVER}
+%dev.kafka.bootstrap.servers=${KAFKA_HOST}
%dev.kafka.security.protocol=SASL_SSL

%dev.kafka.sasl.mechanism=OAUTHBEARER
%dev.kafka.sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \
oauth.client.id="${CLIENT_ID}" \
oauth.client.secret="${CLIENT_SECRET}" \
oauth.token.endpoint.uri="${OAUTH_TOKEN_ENDPOINT_URI}" ;
oauth.client.id="${RHOAS_CLIENT_ID}" \
oauth.client.secret="${RHOAS_CLIENT_SECRET}" \
oauth.token.endpoint.uri="${RHOAS_OAUTH_TOKEN_URL}" ;
%dev.kafka.sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler
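The consumer and producer `application.properties` above now resolve the same renamed variables, plus the registry URL. A minimal sketch of the corresponding exports (angle-bracket values are placeholders), matching the quarkus-registry README later in this diff:

----
$ export KAFKA_HOST=<bootstrap_server>
$ export SERVICE_REGISTRY_URL=<core_registry_url>
$ export RHOAS_OAUTH_TOKEN_URL=<oauth_token_endpoint_uri>
$ export RHOAS_CLIENT_ID=<client_id>
$ export RHOAS_CLIENT_SECRET=<client_secret>
----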
18 changes: 9 additions & 9 deletions docs/kafka/kcat-kafka/README.adoc
@@ -130,9 +130,9 @@ endif::[]
.Setting environment variables for server and credentials
[source,subs="+quotes"]
----
-$ export BOOTSTRAP_SERVER=__<bootstrap_server>__
-$ export USER=__<client_id>__
-$ export PASSWORD=__<client_secret>__
+$ export KAFKA_HOST=__<bootstrap_server>__
+$ export RHOAS_CLIENT_ID=__<client_id>__
+$ export RHOAS_CLIENT_SECRET=__<client_secret>__
----
--

@@ -156,10 +156,10 @@ This example uses the SASL/PLAIN authentication mechanism with the server and cr
.Starting Kafkacat in producer mode
[source]
----
$ kafkacat -t my-first-kafka-topic -b "$BOOTSTRAP_SERVER" \
$ kafkacat -t my-first-kafka-topic -b "$KAFKA_HOST" \
-X security.protocol=SASL_SSL -X sasl.mechanisms=PLAIN \
-X sasl.username="$USER" \
-X sasl.password="$PASSWORD" -P
-X sasl.username="$RHOAS_CLIENT_ID" \
-X sasl.password="$RHOAS_CLIENT_SECRET" -P
----

NOTE: {product-kafka} also supports the SASL/OAUTHBEARER mechanism for authentication, which is the recommended authentication mechanism to use. However, Kafkacat does not yet fully support OAUTHBEARER, so this example uses SASL/PLAIN.
@@ -205,10 +205,10 @@ This example uses the SASL/PLAIN authentication mechanism with the server and cr
.Starting Kafkacat in consumer mode
[source]
----
$ kafkacat -t my-first-kafka-topic -b "$BOOTSTRAP_SERVER" \
$ kafkacat -t my-first-kafka-topic -b "$KAFKA_HOST" \
-X security.protocol=SASL_SSL -X sasl.mechanisms=PLAIN \
-X sasl.username="$USER" \
-X sasl.password="$PASSWORD" -C
-X sasl.username="$RHOAS_CLIENT_ID" \
-X sasl.password="$RHOAS_CLIENT_SECRET" -C
First message
Second message
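With the renamed variables exported as shown above, a quick connectivity check is also possible before producing or consuming; a sketch using kafkacat's metadata listing mode (`-L`) with the same SASL/PLAIN settings:

----
$ kafkacat -L -b "$KAFKA_HOST" \
-X security.protocol=SASL_SSL -X sasl.mechanisms=PLAIN \
-X sasl.username="$RHOAS_CLIENT_ID" \
-X sasl.password="$RHOAS_CLIENT_SECRET"
----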
20 changes: 10 additions & 10 deletions docs/kafka/nodejs-kafka/README.adoc
@@ -119,7 +119,7 @@ To enable your Node.js application to access a Kafka instance, you must configur
* The generated credentials for your {product-kafka} service account
* The Simple Authentication and Security Layer (SASL) mechanism that the client will use to authenticate with the Kafka instance

-In this task, you'll create a new configuration file called `.env`. In this file, you'll set the required bootstrap server and client credentials as environment variables.
+In this task, you'll create a new configuration file called `rhoas.env`. In this file, you'll set the required bootstrap server and client credentials as environment variables.

.Prerequisites
ifndef::qs[]
@@ -130,16 +130,16 @@ endif::[]

.Procedure

-. In your IDE, create a new file. Save the file with the name `.env`, at the root level of the `reactive-example` directory for the cloned repository.
+. In your IDE, create a new file. Save the file with the name `rhoas.env`, at the root level of the `reactive-example` directory for the cloned repository.

-. In the `.env` file, add the lines shown in the example. These lines set the bootstrap server and client credentials as environment variables to be used by the Node.js application.
+. In the `rhoas.env` file, add the lines shown in the example. These lines set the bootstrap server and client credentials as environment variables to be used by the Node.js application.
+
-.Setting environment variables in the .env file
+.Setting environment variables in the rhoas.env file
[source,subs="+quotes"]
----
-KAFKA_BOOTSTRAP_SERVER=__<bootstrap_server>__
-KAFKA_CLIENT_ID=__<client_id>__
-KAFKA_CLIENT_SECRET=__<client_secret>__
+KAFKA_HOST=__<bootstrap_server>__
+RHOAS_CLIENT_ID=__<client_id>__
+RHOAS_CLIENT_SECRET=__<client_secret>__
KAFKA_SASL_MECHANISM=plain
----
+
@@ -157,7 +157,7 @@ endif::[]
+
In this case, observe that the Node.js application uses the SASL/PLAIN authentication method (that is, the value of `KAFKA_SASL_MECHANISM` is set to `plain`). This means that the application uses only the client ID and client secret to authenticate with the Kafka instance. The application doesn't require an authentication token.

-. Save the `.env` file.
+. Save the `rhoas.env` file.

ifdef::qs[]
.Verification
@@ -310,11 +310,11 @@ The output from both components confirms that they successfully connected to the

. In your IDE, in the `producer-backend` directory of the repository that you cloned, open the `producer.js` file.
+
-Observe that the producer component is configured to process environment variables from the `.env` file that you created. The component used the bootstrap server endpoint and client credentials stored in this file to connect to the Kafka instance.
+Observe that the producer component is configured to process environment variables from the `rhoas.env` file that you created. The component used the bootstrap server endpoint and client credentials stored in this file to connect to the Kafka instance.

. In the `consumer-backend` directory, open the `consumer.js` file.
+
-Observe that the consumer component is also configured to process environment variables from the `.env` file that you created.
+Observe that the consumer component is also configured to process environment variables from the `rhoas.env` file that you created.

ifdef::qs[]
.Verification
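The producer and consumer read `rhoas.env` themselves, so no extra setup is needed; if you want to inspect the same values in a shell, one hypothetical way to load a plain `KEY=value` file (assuming no values contain spaces or shell metacharacters) is:

----
$ set -a; . ./rhoas.env; set +a   # export every KEY=value pair from the file
$ echo "$KAFKA_HOST"
----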
8 changes: 4 additions & 4 deletions docs/kafka/quarkus-kafka/README.adoc
@@ -130,10 +130,10 @@ endif::[]
.Setting environment variables for server and credentials
[source,subs="+quotes"]
----
-$ export BOOTSTRAP_SERVER=__<bootstrap_server>__
-$ export CLIENT_ID=__<client_id>__
-$ export CLIENT_SECRET=__<client_secret>__
-$ export OAUTH_TOKEN_ENDPOINT_URI=__<oauth_token_endpoint_uri>__
+$ export KAFKA_HOST=__<bootstrap_server>__
+$ export RHOAS_CLIENT_ID=__<client_id>__
+$ export RHOAS_CLIENT_SECRET=__<client_secret>__
+$ export RHOAS_OAUTH_TOKEN_URL=__<oauth_token_endpoint_uri>__
----
--
. In the Quarkus example application, review the `src/main/resources/application.properties` file to understand how the environment variables you set in the previous step are used in your application. This example uses the `dev` configuration profile in the `application.properties` file.
10 changes: 5 additions & 5 deletions docs/registry/quarkus-registry/README.adoc
@@ -140,11 +140,11 @@ endif::[]
.Setting environment variables for server and credentials
[source,subs="+quotes"]
----
-$ export BOOTSTRAP_SERVER=__<bootstrap_server>__
-$ export REGISTRY_URL=__<core_registry_url>__
-$ export OAUTH_TOKEN_ENDPOINT_URI=__<oauth_token_endpoint_uri>__
-$ export CLIENT_ID=__<client_id>__
-$ export CLIENT_SECRET=__<client_secret>__
+$ export KAFKA_HOST=__<bootstrap_server>__
+$ export SERVICE_REGISTRY_URL=__<core_registry_url>__
+$ export RHOAS_OAUTH_TOKEN_URL=__<oauth_token_endpoint_uri>__
+$ export RHOAS_CLIENT_ID=__<client_id>__
+$ export RHOAS_CLIENT_SECRET=__<client_secret>__
----

. In the Quarkus example application, review the `/src/main/resources/application.properties` files in the `consumer` and `producer` sub-folders to understand how the environment variables you set in the previous step are used. This example uses the `dev` configuration profile in the `application.properties` files.
