diff --git a/dataprocgdc_sparkapplication/backing_file.tf b/dataprocgdc_sparkapplication/backing_file.tf
new file mode 100644
index 00000000..c60b1199
--- /dev/null
+++ b/dataprocgdc_sparkapplication/backing_file.tf
@@ -0,0 +1,15 @@
+# This file has some scaffolding to make sure that names are unique and that
+# a region and zone are selected when you try to create your Terraform resources.
+
+locals {
+ name_suffix = "${random_pet.suffix.id}"
+}
+
+resource "random_pet" "suffix" {
+ length = 2
+}
+
+provider "google" {
+ region = "us-central1"
+ zone = "us-central1-c"
+}
diff --git a/dataprocgdc_sparkapplication/main.tf b/dataprocgdc_sparkapplication/main.tf
new file mode 100644
index 00000000..18a56f04
--- /dev/null
+++ b/dataprocgdc_sparkapplication/main.tf
@@ -0,0 +1,39 @@
+resource "google_dataproc_gdc_application_environment" "app_env" {
+ application_environment_id = "tf-e2e-spark-app-env-${local.name_suffix}"
+ serviceinstance = "do-not-delete-dataproc-gdc-instance"
+ project = "my-project-${local.name_suffix}"
+ location = "us-west2"
+ namespace = "default"
+}
+
+resource "google_dataproc_gdc_spark_application" "spark-application" {
+ spark_application_id = "tf-e2e-spark-app-${local.name_suffix}"
+ serviceinstance = "do-not-delete-dataproc-gdc-instance"
+ project = "my-project-${local.name_suffix}"
+ location = "us-west2"
+ namespace = "default"
+ labels = {
+ "test-label": "label-value"
+ }
+ annotations = {
+ "an_annotation": "annotation_value"
+ }
+ properties = {
+ "spark.executor.instances": "2"
+ }
+ application_environment = google_dataproc_gdc_application_environment.app_env.name
+ version = "1.2"
+ spark_application_config {
+ main_jar_file_uri = "file:///usr/lib/spark/examples/jars/spark-examples.jar"
+ jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"]
+ archive_uris = ["file://usr/lib/spark/examples/spark-examples.jar"]
+ file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"]
+ }
+}
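+
+# Illustrative addition (not in the upstream sample): a minimal sketch of
+# exporting the created application's server-assigned identity, assuming
+# the resource's documented output-only "name" attribute.
+output "spark_application_name" {
+ value = google_dataproc_gdc_spark_application.spark-application.name
+}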
diff --git a/dataprocgdc_sparkapplication/motd b/dataprocgdc_sparkapplication/motd
new file mode 100644
index 00000000..45a906e8
--- /dev/null
+++ b/dataprocgdc_sparkapplication/motd
@@ -0,0 +1,7 @@
+===
+
+These examples use real resources that will be billed to the
+Google Cloud Platform project you use - so make sure that you
+run "terraform destroy" before quitting!
+
+===
diff --git a/dataprocgdc_sparkapplication/tutorial.md b/dataprocgdc_sparkapplication/tutorial.md
new file mode 100644
index 00000000..09677dbc
--- /dev/null
+++ b/dataprocgdc_sparkapplication/tutorial.md
@@ -0,0 +1,79 @@
+# Dataprocgdc Sparkapplication - Terraform
+
+## Setup
+
+
+
+Welcome to Terraform in Google Cloud Shell! First, let us know which project you'd like to use with Terraform.
+
+
+
+Terraform provisions real GCP resources, so anything you create in this session will be billed against this project.
+
+## Terraforming!
+
+Let's use {{project-id}} with Terraform! Click the Cloud Shell icon below to copy the command
+to your shell, and then run it from the shell by pressing Enter/Return. Terraform will pick up
+the project ID from the environment variable.
+
+```bash
+export GOOGLE_CLOUD_PROJECT={{project-id}}
+```
+
+After that, let's get Terraform started. Run the following to pull in the providers.
+
+```bash
+terraform init
+```
+
+With the providers downloaded and a project set, you're ready to use Terraform. Go ahead!
+
+```bash
+terraform apply
+```
+
+Terraform will show you what it plans to do, and prompt you to accept. Type "yes" to accept the plan.
+
+```bash
+yes
+```
+
+
+## Post-Apply
+
+### Editing your config
+
+Now you've provisioned your resources in GCP! If you run a "plan", you should see no changes needed.
+
+```bash
+terraform plan
+```
+
+So let's make a change! Try editing a number, or appending a value to the name in the editor. Then,
+run a 'plan' again.
+
+```bash
+terraform plan
+```
+
+Afterwards, you can run an apply, which implicitly runs a plan and shows you the intended
+changes before prompting you to type 'yes'.
+
+```bash
+terraform apply
+```
+
+```bash
+yes
+```
+
+## Cleanup
+
+Run the following to remove the resources Terraform provisioned:
+
+```bash
+terraform destroy
+```
+```bash
+yes
+```
diff --git a/dataprocgdc_sparkapplication_basic/backing_file.tf b/dataprocgdc_sparkapplication_basic/backing_file.tf
new file mode 100644
index 00000000..c60b1199
--- /dev/null
+++ b/dataprocgdc_sparkapplication_basic/backing_file.tf
@@ -0,0 +1,15 @@
+# This file has some scaffolding to make sure that names are unique and that
+# a region and zone are selected when you try to create your Terraform resources.
+
+locals {
+ name_suffix = "${random_pet.suffix.id}"
+}
+
+resource "random_pet" "suffix" {
+ length = 2
+}
+
+provider "google" {
+ region = "us-central1"
+ zone = "us-central1-c"
+}
diff --git a/dataprocgdc_sparkapplication_basic/main.tf b/dataprocgdc_sparkapplication_basic/main.tf
new file mode 100644
index 00000000..2a5635ec
--- /dev/null
+++ b/dataprocgdc_sparkapplication_basic/main.tf
@@ -0,0 +1,17 @@
+resource "google_dataproc_gdc_spark_application" "spark-application" {
+ spark_application_id = "tf-e2e-spark-app-basic-${local.name_suffix}"
+ serviceinstance = "do-not-delete-dataproc-gdc-instance"
+ project = "my-project-${local.name_suffix}"
+ location = "us-west2"
+ namespace = "default"
+ spark_application_config {
+ main_class = "org.apache.spark.examples.SparkPi"
+ jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"]
+ args = ["10000"]
+ }
+}
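+
+# For orientation (an illustrative note, not part of the original sample):
+# the job above corresponds roughly to this spark-submit invocation:
+#   spark-submit --class org.apache.spark.examples.SparkPi \
+#     /usr/lib/spark/examples/jars/spark-examples.jar 10000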
diff --git a/dataprocgdc_sparkapplication_basic/motd b/dataprocgdc_sparkapplication_basic/motd
new file mode 100644
index 00000000..45a906e8
--- /dev/null
+++ b/dataprocgdc_sparkapplication_basic/motd
@@ -0,0 +1,7 @@
+===
+
+These examples use real resources that will be billed to the
+Google Cloud Platform project you use - so make sure that you
+run "terraform destroy" before quitting!
+
+===
diff --git a/dataprocgdc_sparkapplication_basic/tutorial.md b/dataprocgdc_sparkapplication_basic/tutorial.md
new file mode 100644
index 00000000..94f3ce5a
--- /dev/null
+++ b/dataprocgdc_sparkapplication_basic/tutorial.md
@@ -0,0 +1,79 @@
+# Dataprocgdc Sparkapplication Basic - Terraform
+
+## Setup
+
+
+
+Welcome to Terraform in Google Cloud Shell! First, let us know which project you'd like to use with Terraform.
+
+
+
+Terraform provisions real GCP resources, so anything you create in this session will be billed against this project.
+
+## Terraforming!
+
+Let's use {{project-id}} with Terraform! Click the Cloud Shell icon below to copy the command
+to your shell, and then run it from the shell by pressing Enter/Return. Terraform will pick up
+the project ID from the environment variable.
+
+```bash
+export GOOGLE_CLOUD_PROJECT={{project-id}}
+```
+
+After that, let's get Terraform started. Run the following to pull in the providers.
+
+```bash
+terraform init
+```
+
+With the providers downloaded and a project set, you're ready to use Terraform. Go ahead!
+
+```bash
+terraform apply
+```
+
+Terraform will show you what it plans to do, and prompt you to accept. Type "yes" to accept the plan.
+
+```bash
+yes
+```
+
+
+## Post-Apply
+
+### Editing your config
+
+Now you've provisioned your resources in GCP! If you run a "plan", you should see no changes needed.
+
+```bash
+terraform plan
+```
+
+So let's make a change! Try editing a number, or appending a value to the name in the editor. Then,
+run a 'plan' again.
+
+```bash
+terraform plan
+```
+
+Afterwards, you can run an apply, which implicitly runs a plan and shows you the intended
+changes before prompting you to type 'yes'.
+
+```bash
+terraform apply
+```
+
+```bash
+yes
+```
+
+## Cleanup
+
+Run the following to remove the resources Terraform provisioned:
+
+```bash
+terraform destroy
+```
+```bash
+yes
+```
diff --git a/dataprocgdc_sparkapplication_pyspark/backing_file.tf b/dataprocgdc_sparkapplication_pyspark/backing_file.tf
new file mode 100644
index 00000000..c60b1199
--- /dev/null
+++ b/dataprocgdc_sparkapplication_pyspark/backing_file.tf
@@ -0,0 +1,15 @@
+# This file has some scaffolding to make sure that names are unique and that
+# a region and zone are selected when you try to create your Terraform resources.
+
+locals {
+ name_suffix = "${random_pet.suffix.id}"
+}
+
+resource "random_pet" "suffix" {
+ length = 2
+}
+
+provider "google" {
+ region = "us-central1"
+ zone = "us-central1-c"
+}
diff --git a/dataprocgdc_sparkapplication_pyspark/main.tf b/dataprocgdc_sparkapplication_pyspark/main.tf
new file mode 100644
index 00000000..ea83e9d7
--- /dev/null
+++ b/dataprocgdc_sparkapplication_pyspark/main.tf
@@ -0,0 +1,24 @@
+resource "google_dataproc_gdc_spark_application" "spark-application" {
+ spark_application_id = "tf-e2e-pyspark-app-${local.name_suffix}"
+ serviceinstance = "do-not-delete-dataproc-gdc-instance"
+ project = "my-project-${local.name_suffix}"
+ location = "us-west2"
+ namespace = "default"
+ display_name = "A Pyspark application for a Terraform create test"
+ dependency_images = ["gcr.io/some/image"]
+ pyspark_application_config {
+ main_python_file_uri = "gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py"
+ jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"]
+ python_file_uris = ["gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py"]
+ file_uris = ["file://usr/lib/spark/examples/spark-examples.jar"]
+ archive_uris = ["file://usr/lib/spark/examples/spark-examples.jar"]
+ args = ["10"]
+ }
+}
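+
+# Illustrative sketch (not in the upstream sample), assuming the resource's
+# documented output-only "output_uri" attribute, which points at the
+# application's stdout/stderr location:
+output "pyspark_output_uri" {
+ value = google_dataproc_gdc_spark_application.spark-application.output_uri
+}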
diff --git a/dataprocgdc_sparkapplication_pyspark/motd b/dataprocgdc_sparkapplication_pyspark/motd
new file mode 100644
index 00000000..45a906e8
--- /dev/null
+++ b/dataprocgdc_sparkapplication_pyspark/motd
@@ -0,0 +1,7 @@
+===
+
+These examples use real resources that will be billed to the
+Google Cloud Platform project you use - so make sure that you
+run "terraform destroy" before quitting!
+
+===
diff --git a/dataprocgdc_sparkapplication_pyspark/tutorial.md b/dataprocgdc_sparkapplication_pyspark/tutorial.md
new file mode 100644
index 00000000..46896358
--- /dev/null
+++ b/dataprocgdc_sparkapplication_pyspark/tutorial.md
@@ -0,0 +1,79 @@
+# Dataprocgdc Sparkapplication Pyspark - Terraform
+
+## Setup
+
+
+
+Welcome to Terraform in Google Cloud Shell! First, let us know which project you'd like to use with Terraform.
+
+
+
+Terraform provisions real GCP resources, so anything you create in this session will be billed against this project.
+
+## Terraforming!
+
+Let's use {{project-id}} with Terraform! Click the Cloud Shell icon below to copy the command
+to your shell, and then run it from the shell by pressing Enter/Return. Terraform will pick up
+the project ID from the environment variable.
+
+```bash
+export GOOGLE_CLOUD_PROJECT={{project-id}}
+```
+
+After that, let's get Terraform started. Run the following to pull in the providers.
+
+```bash
+terraform init
+```
+
+With the providers downloaded and a project set, you're ready to use Terraform. Go ahead!
+
+```bash
+terraform apply
+```
+
+Terraform will show you what it plans to do, and prompt you to accept. Type "yes" to accept the plan.
+
+```bash
+yes
+```
+
+
+## Post-Apply
+
+### Editing your config
+
+Now you've provisioned your resources in GCP! If you run a "plan", you should see no changes needed.
+
+```bash
+terraform plan
+```
+
+So let's make a change! Try editing a number, or appending a value to the name in the editor. Then,
+run a 'plan' again.
+
+```bash
+terraform plan
+```
+
+Afterwards, you can run an apply, which implicitly runs a plan and shows you the intended
+changes before prompting you to type 'yes'.
+
+```bash
+terraform apply
+```
+
+```bash
+yes
+```
+
+## Cleanup
+
+Run the following to remove the resources Terraform provisioned:
+
+```bash
+terraform destroy
+```
+```bash
+yes
+```
diff --git a/dataprocgdc_sparkapplication_sparkr/backing_file.tf b/dataprocgdc_sparkapplication_sparkr/backing_file.tf
new file mode 100644
index 00000000..c60b1199
--- /dev/null
+++ b/dataprocgdc_sparkapplication_sparkr/backing_file.tf
@@ -0,0 +1,15 @@
+# This file has some scaffolding to make sure that names are unique and that
+# a region and zone are selected when you try to create your Terraform resources.
+
+locals {
+ name_suffix = "${random_pet.suffix.id}"
+}
+
+resource "random_pet" "suffix" {
+ length = 2
+}
+
+provider "google" {
+ region = "us-central1"
+ zone = "us-central1-c"
+}
diff --git a/dataprocgdc_sparkapplication_sparkr/main.tf b/dataprocgdc_sparkapplication_sparkr/main.tf
new file mode 100644
index 00000000..cbc79730
--- /dev/null
+++ b/dataprocgdc_sparkapplication_sparkr/main.tf
@@ -0,0 +1,14 @@
+resource "google_dataproc_gdc_spark_application" "spark-application" {
+ spark_application_id = "tf-e2e-sparkr-app-${local.name_suffix}"
+ serviceinstance = "do-not-delete-dataproc-gdc-instance"
+ project = "my-project-${local.name_suffix}"
+ location = "us-west2"
+ namespace = "default"
+ display_name = "A SparkR application for a Terraform create test"
+ spark_r_application_config {
+ main_r_file_uri = "gs://some-bucket/something.R"
+ file_uris = ["file://usr/lib/spark/examples/spark-examples.jar"]
+ archive_uris = ["file://usr/lib/spark/examples/spark-examples.jar"]
+ args = ["10"]
+ }
+}
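+
+# Illustrative note (not in the upstream sample): the gs:// URI above is a
+# placeholder; point main_r_file_uri at an R script in a bucket that the
+# service instance can read.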
diff --git a/dataprocgdc_sparkapplication_sparkr/motd b/dataprocgdc_sparkapplication_sparkr/motd
new file mode 100644
index 00000000..45a906e8
--- /dev/null
+++ b/dataprocgdc_sparkapplication_sparkr/motd
@@ -0,0 +1,7 @@
+===
+
+These examples use real resources that will be billed to the
+Google Cloud Platform project you use - so make sure that you
+run "terraform destroy" before quitting!
+
+===
diff --git a/dataprocgdc_sparkapplication_sparkr/tutorial.md b/dataprocgdc_sparkapplication_sparkr/tutorial.md
new file mode 100644
index 00000000..85739102
--- /dev/null
+++ b/dataprocgdc_sparkapplication_sparkr/tutorial.md
@@ -0,0 +1,79 @@
+# Dataprocgdc Sparkapplication Sparkr - Terraform
+
+## Setup
+
+
+
+Welcome to Terraform in Google Cloud Shell! First, let us know which project you'd like to use with Terraform.
+
+
+
+Terraform provisions real GCP resources, so anything you create in this session will be billed against this project.
+
+## Terraforming!
+
+Let's use {{project-id}} with Terraform! Click the Cloud Shell icon below to copy the command
+to your shell, and then run it from the shell by pressing Enter/Return. Terraform will pick up
+the project ID from the environment variable.
+
+```bash
+export GOOGLE_CLOUD_PROJECT={{project-id}}
+```
+
+After that, let's get Terraform started. Run the following to pull in the providers.
+
+```bash
+terraform init
+```
+
+With the providers downloaded and a project set, you're ready to use Terraform. Go ahead!
+
+```bash
+terraform apply
+```
+
+Terraform will show you what it plans to do, and prompt you to accept. Type "yes" to accept the plan.
+
+```bash
+yes
+```
+
+
+## Post-Apply
+
+### Editing your config
+
+Now you've provisioned your resources in GCP! If you run a "plan", you should see no changes needed.
+
+```bash
+terraform plan
+```
+
+So let's make a change! Try editing a number, or appending a value to the name in the editor. Then,
+run a 'plan' again.
+
+```bash
+terraform plan
+```
+
+Afterwards, you can run an apply, which implicitly runs a plan and shows you the intended
+changes before prompting you to type 'yes'.
+
+```bash
+terraform apply
+```
+
+```bash
+yes
+```
+
+## Cleanup
+
+Run the following to remove the resources Terraform provisioned:
+
+```bash
+terraform destroy
+```
+```bash
+yes
+```
diff --git a/dataprocgdc_sparkapplication_sparksql/backing_file.tf b/dataprocgdc_sparkapplication_sparksql/backing_file.tf
new file mode 100644
index 00000000..c60b1199
--- /dev/null
+++ b/dataprocgdc_sparkapplication_sparksql/backing_file.tf
@@ -0,0 +1,15 @@
+# This file has some scaffolding to make sure that names are unique and that
+# a region and zone are selected when you try to create your Terraform resources.
+
+locals {
+ name_suffix = "${random_pet.suffix.id}"
+}
+
+resource "random_pet" "suffix" {
+ length = 2
+}
+
+provider "google" {
+ region = "us-central1"
+ zone = "us-central1-c"
+}
diff --git a/dataprocgdc_sparkapplication_sparksql/main.tf b/dataprocgdc_sparkapplication_sparksql/main.tf
new file mode 100644
index 00000000..b3e95338
--- /dev/null
+++ b/dataprocgdc_sparkapplication_sparksql/main.tf
@@ -0,0 +1,17 @@
+resource "google_dataproc_gdc_spark_application" "spark-application" {
+ spark_application_id = "tf-e2e-sparksql-app-${local.name_suffix}"
+ serviceinstance = "do-not-delete-dataproc-gdc-instance"
+ project = "my-project-${local.name_suffix}"
+ location = "us-west2"
+ namespace = "default"
+ display_name = "A SparkSql application for a Terraform create test"
+ spark_sql_application_config {
+ jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"]
+ query_list {
+ queries = ["show tables;"]
+ }
+ script_variables = {
+ "MY_VAR": "1"
+ }
+ }
+}
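+
+# Illustrative note (not in the upstream sample): script_variables are made
+# available to the queries, equivalent to running `SET MY_VAR="1";` in
+# Spark SQL before the statements in query_list run.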
diff --git a/dataprocgdc_sparkapplication_sparksql/motd b/dataprocgdc_sparkapplication_sparksql/motd
new file mode 100644
index 00000000..45a906e8
--- /dev/null
+++ b/dataprocgdc_sparkapplication_sparksql/motd
@@ -0,0 +1,7 @@
+===
+
+These examples use real resources that will be billed to the
+Google Cloud Platform project you use - so make sure that you
+run "terraform destroy" before quitting!
+
+===
diff --git a/dataprocgdc_sparkapplication_sparksql/tutorial.md b/dataprocgdc_sparkapplication_sparksql/tutorial.md
new file mode 100644
index 00000000..bad94aa3
--- /dev/null
+++ b/dataprocgdc_sparkapplication_sparksql/tutorial.md
@@ -0,0 +1,79 @@
+# Dataprocgdc Sparkapplication Sparksql - Terraform
+
+## Setup
+
+
+
+Welcome to Terraform in Google Cloud Shell! First, let us know which project you'd like to use with Terraform.
+
+
+
+Terraform provisions real GCP resources, so anything you create in this session will be billed against this project.
+
+## Terraforming!
+
+Let's use {{project-id}} with Terraform! Click the Cloud Shell icon below to copy the command
+to your shell, and then run it from the shell by pressing Enter/Return. Terraform will pick up
+the project ID from the environment variable.
+
+```bash
+export GOOGLE_CLOUD_PROJECT={{project-id}}
+```
+
+After that, let's get Terraform started. Run the following to pull in the providers.
+
+```bash
+terraform init
+```
+
+With the providers downloaded and a project set, you're ready to use Terraform. Go ahead!
+
+```bash
+terraform apply
+```
+
+Terraform will show you what it plans to do, and prompt you to accept. Type "yes" to accept the plan.
+
+```bash
+yes
+```
+
+
+## Post-Apply
+
+### Editing your config
+
+Now you've provisioned your resources in GCP! If you run a "plan", you should see no changes needed.
+
+```bash
+terraform plan
+```
+
+So let's make a change! Try editing a number, or appending a value to the name in the editor. Then,
+run a 'plan' again.
+
+```bash
+terraform plan
+```
+
+Afterwards, you can run an apply, which implicitly runs a plan and shows you the intended
+changes before prompting you to type 'yes'.
+
+```bash
+terraform apply
+```
+
+```bash
+yes
+```
+
+## Cleanup
+
+Run the following to remove the resources Terraform provisioned:
+
+```bash
+terraform destroy
+```
+```bash
+yes
+```
diff --git a/dataprocgdc_sparkapplication_sparksql_query_file/backing_file.tf b/dataprocgdc_sparkapplication_sparksql_query_file/backing_file.tf
new file mode 100644
index 00000000..c60b1199
--- /dev/null
+++ b/dataprocgdc_sparkapplication_sparksql_query_file/backing_file.tf
@@ -0,0 +1,15 @@
+# This file has some scaffolding to make sure that names are unique and that
+# a region and zone are selected when you try to create your Terraform resources.
+
+locals {
+ name_suffix = "${random_pet.suffix.id}"
+}
+
+resource "random_pet" "suffix" {
+ length = 2
+}
+
+provider "google" {
+ region = "us-central1"
+ zone = "us-central1-c"
+}
diff --git a/dataprocgdc_sparkapplication_sparksql_query_file/main.tf b/dataprocgdc_sparkapplication_sparksql_query_file/main.tf
new file mode 100644
index 00000000..80099bdc
--- /dev/null
+++ b/dataprocgdc_sparkapplication_sparksql_query_file/main.tf
@@ -0,0 +1,15 @@
+resource "google_dataproc_gdc_spark_application" "spark-application" {
+ spark_application_id = "tf-e2e-sparksql-app-${local.name_suffix}"
+ serviceinstance = "do-not-delete-dataproc-gdc-instance"
+ project = "my-project-${local.name_suffix}"
+ location = "us-west2"
+ namespace = "default"
+ display_name = "A SparkSql application for a Terraform create test"
+ spark_sql_application_config {
+ jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"]
+ query_file_uri = "gs://some-bucket/something.sql"
+ script_variables = {
+ "MY_VAR": "1"
+ }
+ }
+}
diff --git a/dataprocgdc_sparkapplication_sparksql_query_file/motd b/dataprocgdc_sparkapplication_sparksql_query_file/motd
new file mode 100644
index 00000000..45a906e8
--- /dev/null
+++ b/dataprocgdc_sparkapplication_sparksql_query_file/motd
@@ -0,0 +1,7 @@
+===
+
+These examples use real resources that will be billed to the
+Google Cloud Platform project you use - so make sure that you
+run "terraform destroy" before quitting!
+
+===
diff --git a/dataprocgdc_sparkapplication_sparksql_query_file/tutorial.md b/dataprocgdc_sparkapplication_sparksql_query_file/tutorial.md
new file mode 100644
index 00000000..fc31453b
--- /dev/null
+++ b/dataprocgdc_sparkapplication_sparksql_query_file/tutorial.md
@@ -0,0 +1,79 @@
+# Dataprocgdc Sparkapplication Sparksql Query File - Terraform
+
+## Setup
+
+
+
+Welcome to Terraform in Google Cloud Shell! First, let us know which project you'd like to use with Terraform.
+
+
+
+Terraform provisions real GCP resources, so anything you create in this session will be billed against this project.
+
+## Terraforming!
+
+Let's use {{project-id}} with Terraform! Click the Cloud Shell icon below to copy the command
+to your shell, and then run it from the shell by pressing Enter/Return. Terraform will pick up
+the project ID from the environment variable.
+
+```bash
+export GOOGLE_CLOUD_PROJECT={{project-id}}
+```
+
+After that, let's get Terraform started. Run the following to pull in the providers.
+
+```bash
+terraform init
+```
+
+With the providers downloaded and a project set, you're ready to use Terraform. Go ahead!
+
+```bash
+terraform apply
+```
+
+Terraform will show you what it plans to do, and prompt you to accept. Type "yes" to accept the plan.
+
+```bash
+yes
+```
+
+
+## Post-Apply
+
+### Editing your config
+
+Now you've provisioned your resources in GCP! If you run a "plan", you should see no changes needed.
+
+```bash
+terraform plan
+```
+
+So let's make a change! Try editing a number, or appending a value to the name in the editor. Then,
+run a 'plan' again.
+
+```bash
+terraform plan
+```
+
+Afterwards, you can run an apply, which implicitly runs a plan and shows you the intended
+changes before prompting you to type 'yes'.
+
+```bash
+terraform apply
+```
+
+```bash
+yes
+```
+
+## Cleanup
+
+Run the following to remove the resources Terraform provisioned:
+
+```bash
+terraform destroy
+```
+```bash
+yes
+```