diff --git a/cloudsql-postgresql-plugin/pom.xml b/cloudsql-postgresql-plugin/pom.xml
index e8736f8b..2f974e85 100644
--- a/cloudsql-postgresql-plugin/pom.xml
+++ b/cloudsql-postgresql-plugin/pom.xml
@@ -50,7 +50,7 @@
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
-      <version>23.0</version>
+      <version>31.0.1-jre</version>
@@ -102,6 +102,7 @@
       <version>42.3.1</version>
       <scope>test</scope>
+
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature
new file mode 100644
index 00000000..0f13bfe2
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature
@@ -0,0 +1,70 @@
+#
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+@Regression @Sink_Required
+Feature: CloudSQL-PostgreSQL sink - Verify CloudSQL-PostgreSQL sink plugin design time scenarios
+
+ Scenario: To verify CloudSQLPostgreSQL sink plugin validation with connection and basic details for connectivity
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Replace input plugin property: "dbSchemaName" with value: "schema"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+
+ Scenario: To verify CloudSQLPostgreSQL sink plugin validation with connection arguments
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Replace input plugin property: "dbSchemaName" with value: "schema"
+ Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+
+ Scenario: To verify CloudSQLPostgreSQL sink plugin validation with advanced details and connection timeout
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
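
The connection fields exercised above (`connectionName`, `database`, `user`, `password`) follow the usual Cloud SQL socket-factory JDBC setup. As a rough illustration (values are placeholders, not the scenarios' actual test data, and it assumes the PostgreSQL JDBC driver plus `com.google.cloud.sql:postgres-socket-factory` are on the classpath), the plugin-style configuration maps to a JDBC URL along these lines:

```java
import java.sql.Connection;
import java.sql.DriverManager;

public class CloudSqlJdbcSketch {
  public static void main(String[] args) throws Exception {
    // Placeholders; the e2e tests load real values from property files.
    String connectionName = "my-project:us-central1:my-instance"; // <PROJECT>:<REGION>:<INSTANCE>
    String database = "mydb";
    String user = "postgres";
    String password = "secret";

    // Cloud SQL PostgreSQL connections are routed through the socket factory.
    String url = String.format(
        "jdbc:postgresql:///%s?cloudSqlInstance=%s"
            + "&socketFactory=com.google.cloud.sql.postgres.SocketFactory",
        database, connectionName);
    try (Connection conn = DriverManager.getConnection(url, user, password)) {
      System.out.println("connected: " + conn.getMetaData().getURL());
    }
  }
}
```
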
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature
new file mode 100644
index 00000000..98b6f689
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature
@@ -0,0 +1,53 @@
+#
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+@Regression @Sink_Required
+Feature: CloudSQL-PostgreSQL sink - Verify CloudSQL-PostgreSQL sink plugin design time macro scenarios
+
+ Scenario: To verify CloudSQLPostgreSQL sink plugin validation with macro enabled fields for connection section
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostGreSQLDriverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostGreSQLUser"
+ Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostGreSQLPassword"
+ Then Click on the Macro button of Property: "connectionArguments" and set the value to: "cloudSQLPostGreSQLConnectionArguments"
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Replace input plugin property: "dbSchemaName" with value: "schema"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+
+ Scenario: To verify CloudSQLPostgreSQL sink plugin validation with macro enabled fields for basic section
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostGreSQLTableName"
+ Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostGreSQLSchemaName"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
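
The "Click on the Macro button" steps wrap a property in CDAP's `${...}` macro syntax, so its value is resolved from runtime arguments when the pipeline runs (the RunTimeMacro feature below supplies them). A minimal sketch of the substitution idea, with made-up keys mirroring the ones used above:

```java
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class MacroSketch {
  private static final Pattern MACRO = Pattern.compile("\\$\\{([^}]+)}");

  // Naive stand-in for CDAP's macro evaluation: replace each ${key}
  // with the matching runtime argument, leaving unknown keys untouched.
  static String resolve(String value, Map<String, String> runtimeArgs) {
    Matcher m = MACRO.matcher(value);
    StringBuilder out = new StringBuilder();
    while (m.find()) {
      m.appendReplacement(out, Matcher.quoteReplacement(
          runtimeArgs.getOrDefault(m.group(1), m.group(0))));
    }
    return m.appendTail(out).toString();
  }

  public static void main(String[] args) {
    Map<String, String> runtimeArgs = Map.of(
        "cloudSQLPostGreSQLTableName", "targetTable",
        "cloudSQLPostGreSQLSchemaName", "schema");
    // Prints "schema.targetTable"
    System.out.println(resolve(
        "${cloudSQLPostGreSQLSchemaName}.${cloudSQLPostGreSQLTableName}", runtimeArgs));
  }
}
```
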
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature
new file mode 100644
index 00000000..482ac5cb
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature
@@ -0,0 +1,143 @@
+#
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+@Regression @Sink_Required
+Feature: CloudSQL-PostgreSQL sink - Verify CloudSQL-PostgreSQL sink plugin error scenarios
+
+ Scenario: Verify CloudSQLPostgreSQL sink plugin validation errors for mandatory fields
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Click on the Validate button
+ Then Verify mandatory property error for below listed properties:
+ | jdbcPluginName |
+ | referenceName |
+ | database |
+ | tableName |
+
+ Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid reference test data
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "invalidRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudPostgreSQLInvalidReferenceName"
+
+ Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid connection name test data
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "invalidConnectionName"
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "errorMessageConnectionName"
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST
+ Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid database
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Click on the Get Schema button
+ Then Verify the Output Schema matches the Expected Schema: "datatypesSchema"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Replace input plugin property: "database" with value: "invalidDatabaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Click on the Validate button
+ Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSinkDatabase" on the header
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST
+ Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid table name
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Click on the Get Schema button
+ Then Verify the Output Schema matches the Expected Schema: "datatypesSchema"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "invalidTable"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "tableName" is displaying an in-line error message: "errorMessageInvalidTableName"
+
+ Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with blank username
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername"
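
Several of these scenarios hinge on the shape of the instance connection name. Cloud SQL connection names follow `<PROJECT_ID>:<REGION>:<INSTANCE_NAME>`, and a value that does not fit that three-part shape is what the `invalidConnectionName` test data is expected to trip. A hedged sketch of such a check (the plugin's actual validation may differ in detail):

```java
public class ConnectionNameCheck {
  // Cloud SQL instance connection names look like
  // <PROJECT_ID>:<REGION>:<INSTANCE_NAME>, e.g. "my-project:us-central1:my-instance".
  static boolean isWellFormed(String connectionName) {
    return connectionName != null && connectionName.split(":", -1).length == 3;
  }

  public static void main(String[] args) {
    System.out.println(isWellFormed("my-project:us-central1:my-instance")); // true
    System.out.println(isWellFormed("not-a-connection-name"));              // false
  }
}
```
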
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature
new file mode 100644
index 00000000..1ee11cbe
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature
@@ -0,0 +1,146 @@
+#
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+@Regression @Sink_Required
+Feature: CloudSQL-PostgreSQL sink - Verify data transfer from BigQuery source to CloudSQL-PostgreSQL sink
+
+ @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1526
+ Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink successfully with supported datatypes
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "project" with value: "projectId"
+ Then Enter input plugin property: "datasetProject" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqSourceTable"
+ Then Click on the Get Schema button
+ Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema"
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "dbSchemaName" with value: "schema"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on the Preview Data link on the Sink plugin node: "CloudSQLPostgreSQL"
+ Then Close the preview data
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table
+
+ @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1526
+ Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink successfully when connection arguments are set
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "project" with value: "projectId"
+ Then Enter input plugin property: "datasetProject" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqSourceTable"
+ Then Click on the Get Schema button
+ Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema"
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "dbSchemaName" with value: "schema"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on the Preview Data link on the Sink plugin node: "CloudSQLPostgreSQL"
+ Then Close the preview data
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table
+
+ @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1526
+ Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink with advanced property connection timeout
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "project" with value: "projectId"
+ Then Enter input plugin property: "datasetProject" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqSourceTable"
+ Then Click on the Get Schema button
+ Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema"
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "dbSchemaName" with value: "schema"
+ Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on the Preview Data link on the Sink plugin node: "CloudSQLPostgreSQL"
+ Then Close the preview data
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table
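
Each run ends by comparing the rows written to Cloud SQL against the BigQuery source. One plausible shape for that check (client usage and all project, dataset, and table names here are illustrative, not the framework's actual validation code) is a simple count comparison:

```java
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.QueryJobConfiguration;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class CountComparisonSketch {
  public static void main(String[] args) throws Exception {
    // Source-side count from BigQuery (project/dataset/table are placeholders).
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    long bqCount = bigquery.query(QueryJobConfiguration.newBuilder(
            "SELECT COUNT(*) FROM `my-project.dataset.bq_source_table`").build())
        .getValues().iterator().next().get(0).getLongValue();

    // Target-side count from Cloud SQL PostgreSQL over JDBC.
    String url = "jdbc:postgresql:///mydb?cloudSqlInstance=my-project:us-central1:my-instance"
        + "&socketFactory=com.google.cloud.sql.postgres.SocketFactory";
    try (Connection conn = DriverManager.getConnection(url, "postgres", "secret");
         Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM my_schema.target_table")) {
      rs.next();
      System.out.println(bqCount == rs.getLong(1) ? "counts match" : "counts differ");
    }
  }
}
```

A full fidelity check would compare values column by column, as the step name suggests; the count comparison only shows the two-sided wiring.
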
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature
new file mode 100644
index 00000000..5bb22887
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature
@@ -0,0 +1,134 @@
+#
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+@Regression @Sink_Required
+Feature: CloudSQL-PostgreSQL sink - Verify data transfer to CloudSQL-PostgreSQL sink with macro arguments
+
+ @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1629 @PLUGIN-1526
+ Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink using macro arguments in connection section
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId"
+ Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId"
+ Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
+ Then Click on the Macro button of Property: "table" and set the value to: "bqTable"
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername"
+ Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Replace input plugin property: "dbSchemaName" with value: "schema"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Enter runtime argument value "projectId" for key "bqProjectId"
+ Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Enter runtime argument value "bqSourceTable" for key "bqTable"
+ Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName"
+ Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername"
+ Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword"
+ Then Run the preview of pipeline with runtime arguments
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Enter runtime argument value "projectId" for key "bqProjectId"
+ Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Enter runtime argument value "bqSourceTable" for key "bqTable"
+ Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName"
+ Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername"
+ Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword"
+ Then Run the Pipeline in Runtime with runtime arguments
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table
+
+ @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1629 @PLUGIN-1526
+ Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink using macro arguments in basic section
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId"
+ Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId"
+ Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
+ Then Click on the Macro button of Property: "table" and set the value to: "bqTable"
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostgreSQLTableName"
+ Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostgreSQLSchemaName"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Enter runtime argument value "projectId" for key "bqProjectId"
+ Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Enter runtime argument value "bqSourceTable" for key "bqTable"
+ Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName"
+ Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName"
+ Then Run the preview of pipeline with runtime arguments
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Enter runtime argument value "projectId" for key "bqProjectId"
+ Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Enter runtime argument value "bqSourceTable" for key "bqTable"
+ Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName"
+ Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName"
+ Then Run the Pipeline in Runtime with runtime arguments
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table
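
The credential macros above are filled via "Enter runtime argument value from environment variable", which keeps secrets out of checked-in test data. A minimal sketch of that pattern, assuming the environment variable names match the step text (the driver-name value below is a placeholder):

```java
import java.util.HashMap;
import java.util.Map;

public class EnvRuntimeArgsSketch {
  public static void main(String[] args) {
    Map<String, String> runtimeArgs = new HashMap<>();
    // Non-sensitive arguments can come straight from property files...
    runtimeArgs.put("cloudSQLPostgreSQLDriverName", "cloudsql-postgresql");
    // ...while credentials are read from the environment at run time.
    runtimeArgs.put("cloudSQLPostgreSQLUsername", System.getenv("username"));
    runtimeArgs.put("cloudSQLPostgreSQLPassword", System.getenv("password"));
    System.out.println("runtime argument keys: " + runtimeArgs.keySet());
  }
}
```
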
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature
new file mode 100644
index 00000000..4302bb91
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature
@@ -0,0 +1,57 @@
+#
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+@Regression @Source_Required
+Feature: CloudSQL-PostgreSQL source - Verify CloudSQL-PostgreSQL source plugin design time scenarios
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation with connection and basic details for connectivity
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Click on the Get Schema button
+ Then Verify the Output Schema matches the Expected Schema: "datatypesSchema"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation with connection arguments
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Click on the Get Schema button
+ Then Verify the Output Schema matches the Expected Schema: "datatypesSchema"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
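
The Get Schema button infers the output schema from the import query; JDBC sources conventionally do this by executing the query with a row limit and reading the result-set metadata. A rough sketch of that idea, with placeholder connection details and table names:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSetMetaData;
import java.sql.Statement;

public class GetSchemaSketch {
  public static void main(String[] args) throws Exception {
    String url = "jdbc:postgresql:///mydb?cloudSqlInstance=my-project:us-central1:my-instance"
        + "&socketFactory=com.google.cloud.sql.postgres.SocketFactory";
    try (Connection conn = DriverManager.getConnection(url, "postgres", "secret");
         Statement stmt = conn.createStatement()) {
      stmt.setMaxRows(1); // only the metadata is needed, not the data
      ResultSetMetaData md =
          stmt.executeQuery("select * from my_schema.my_table").getMetaData();
      for (int i = 1; i <= md.getColumnCount(); i++) {
        System.out.printf("%s -> %s%n", md.getColumnName(i), md.getColumnTypeName(i));
      }
    }
  }
}
```
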
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature
new file mode 100644
index 00000000..c35a4b99
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature
@@ -0,0 +1,54 @@
+#
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+@Regression @Source_Required
+Feature: CloudSQL-PostgreSQL source - Verify CloudSQL-PostgreSQL source plugin design time macro scenarios
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation with macro enabled fields for connection section
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostGreSQLDriverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostGreSQLUser"
+ Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostGreSQLPassword"
+ Then Click on the Macro button of Property: "connectionArguments" and set the value to: "cloudSQLPostGreSQLConnectionArguments"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation with macro enabled fields for basic section
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Click on the Macro button of Property: "splitBy" and set the value to: "cloudSQLPostGreSQLSplitBy"
+ Then Click on the Macro button of Property: "fetchSize" and set the value to: "cloudSQLPostGreSQLFetchSize"
+ Then Click on the Macro button of Property: "boundingQuery" and set the value in textarea: "cloudSQLPostGreSQLBoundingQuery"
+ Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostGreSQLImportQuery"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
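
`splitBy`, `boundingQuery`, and `importQuery` work together for partitioned reads: when the source runs with more than one split, the import query conventionally carries a `$CONDITIONS` placeholder that is rewritten per split into a range predicate on the split-by column, and the bounding query returns that column's min and max. Illustrative values (the real ones live in the test's property files):

```java
public class SplitQuerySketch {
  public static void main(String[] args) {
    // $CONDITIONS is replaced for each split with a range predicate on splitBy.
    String importQuery   = "SELECT * FROM my_schema.my_table WHERE $CONDITIONS";
    String splitBy       = "id";
    String boundingQuery = "SELECT MIN(id), MAX(id) FROM my_schema.my_table";
    System.out.println(importQuery + " | " + splitBy + " | " + boundingQuery);
  }
}
```
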
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature
new file mode 100644
index 00000000..2ccf2a53
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature
@@ -0,0 +1,229 @@
+#
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+@Regression @Source_Required
+Feature: CloudSQL-PostgreSQL source - Verify CloudSQL-PostgreSQL source plugin design time validation scenarios
+
+ Scenario: Verify CloudSQLPostgreSQL source plugin validation errors for mandatory fields
+ Given Open Datafusion Project to configure pipeline
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Click on the Validate button
+ Then Verify mandatory property error for below listed properties:
+ | jdbcPluginName |
+ | database |
+ | referenceName |
+ | importQuery |
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid reference test data
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "invalidRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudPostgreSQLInvalidReferenceName"
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid connection name test data
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "invalidConnectionName"
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "errorMessageConnectionName"
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with blank bounding query
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "invalidDatabaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery"
+ Then Replace input plugin property: "splitBy" with value: "splitBy"
+ Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "boundingQuery" is displaying an in-line error message: "errorMessageBoundingQuery"
+ Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageBoundingQuery"
+ Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessageInvalidImportQuery"
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with number of splits set without a split-by field name
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "DatabaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageBlankSplitBy"
+ Then Verify that the Plugin Property: "splitBy" is displaying an in-line error message: "errorMessageBlankSplitBy"
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation error message when the number of splits value is not a number
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "DatabaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Replace input plugin property: "numSplits" with value: "zeroSplits"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageNumberOfSplitNotNumber"
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation error message when the number of splits value is changed to zero
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "DatabaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Replace input plugin property: "numSplits" with value: "zeroValue"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageInvalidNumberOfSplits"
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation error message when fetch size is set to zero
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "DatabaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Replace input plugin property: "fetchSize" with value: "zeroValue"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "fetchSize" is displaying an in-line error message: "errorMessageInvalidFetchSize"
+
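+ # Connectivity problems such as an invalid database or bad credentials are reported in the header banner rather than as an in-line field error.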
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid database
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "invalidDatabase"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Click on the Validate button
+ Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSourceDatabase" on the header
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid import query
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery"
+ Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessageInvalidImportQuery"
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with blank username
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername"
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with blank password
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "invalidDatabase"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Click on the Validate button
+ Then Verify that the Plugin is displaying an error message: "errorMessageBlankPassword" on the header
+
+ Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid password
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "invalidPassword" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "invalidDatabase"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Click on the Validate button
+ Then Verify that the Plugin is displaying an error message: "errorMessageInvalidPassword" on the header
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature
new file mode 100644
index 00000000..e4a898ca
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature
@@ -0,0 +1,237 @@
+#
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+@Regression @Source_Required
+Feature: CloudSQL-PostgreSQL source - Verify CloudSQL-PostgreSQL source plugin run time scenarios
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @BQ_SINK_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @PLUGIN-1526
+ Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink successfully with supported datatypes
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Click on the Get Schema button
+ Then Verify the Output Schema matches the Expected Schema: "datatypesSchema"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "project" with value: "projectId"
+ Then Enter input plugin property: "datasetProject" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqTargetTable"
+ Then Click plugin property: "truncateTable"
+ Then Click plugin property: "updateTableSchema"
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate the values of records transferred to target Big Query table is equal to the values from source table
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @BQ_SINK_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @PLUGIN-1526
+ Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink successfully when connection arguments are set
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+ Then Click on the Get Schema button
+ Then Verify the Output Schema matches the Expected Schema: "datatypesSchema"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "project" with value: "projectId"
+ Then Enter input plugin property: "datasetProject" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqTargetTable"
+ Then Click plugin property: "truncateTable"
+ Then Click plugin property: "updateTableSchema"
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate the values of records transferred to target Big Query table is equal to the values from source table
+
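+ # The bounding query is only executed at runtime, so an invalid query passes design-time validation and the failure shows up in the pipeline logs instead.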
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST
+ Scenario: To verify pipeline failure message in logs when an invalid bounding query is provided
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Replace input plugin property: "splitBy" with value: "splitBy"
+ Then Enter textarea plugin property: "importQuery" with value: "importQuery"
+ Then Click on the Get Schema button
+ Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
+ Then Enter textarea plugin property: "boundingQuery" with value: "invalidBoundingQueryValue"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "project" with value: "projectId"
+ Then Enter input plugin property: "datasetProject" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqTargetTable"
+ Then Click plugin property: "truncateTable"
+ Then Click plugin property: "updateTableSchema"
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ And Save and Deploy Pipeline
+ And Run the Pipeline in Runtime
+ And Wait till pipeline is in running state
+ And Verify the pipeline status is "Failed"
+ Then Open Pipeline logs and verify Log entries having below listed Level and Message:
+ | Level | Message |
+ | ERROR | errorLogsMessageInvalidBoundingQuery |
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST
+ Scenario: To verify the pipeline preview fails with an invalid bounding query when the split-by field is set
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Replace input plugin property: "splitBy" with value: "splitBy"
+ Then Enter textarea plugin property: "importQuery" with value: "importQuery"
+ Then Click on the Get Schema button
+ Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
+ Then Enter textarea plugin property: "boundingQuery" with value: "invalidBoundingQuery"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "project" with value: "projectId"
+ Then Enter input plugin property: "datasetProject" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqTargetTable"
+ Then Click plugin property: "truncateTable"
+ Then Click plugin property: "updateTableSchema"
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Wait till pipeline preview is in running state
+ Then Verify the preview run status of pipeline in the logs is "failed"
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST
+ Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully with supported datatypes
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Click on the Get Schema button
+ Then Verify the Output Schema matches the Expected Schema: "datatypesSchema"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "dbSchemaName" with value: "schema"
+ Then Validate "CloudSQL PostgreSQL2" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on the Preview Data link on the Sink plugin node: "CloudSQLPostgreSQL"
+ Then Close the preview data
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the values of records transferred to target table is equal to the values from source table
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature
new file mode 100644
index 00000000..8c9fe1f5
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature
@@ -0,0 +1,334 @@
+#
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+@Regression @Source_Required
+Feature: CloudSQL-PostgreSQL source - Verify CloudSQL-PostgreSQL plugin data transfer with macro arguments
+
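+ # Macro-enabled properties are resolved from runtime arguments, so each scenario supplies the same argument values for both the preview run and the deployed run.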
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST
+ Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully using macro arguments in connection section
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername"
+ Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2"
+ Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername"
+ Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword"
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Replace input plugin property: "dbSchemaName" with value: "schema"
+ Then Validate "CloudSQL PostgreSQL2" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName"
+ Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername"
+ Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword"
+ Then Run the preview of pipeline with runtime arguments
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName"
+ Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername"
+ Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword"
+ Then Run the Pipeline in Runtime with runtime arguments
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate the values of records transferred to target table is equal to the values from source table
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST
+ Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully using macro arguments in basic section
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Click on the Macro button of Property: "splitBy" and set the value to: "cloudSQLPostgreSQLSplitByColumn"
+ Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLImportQuery"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostgreSQLTableName"
+ Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostgreSQLSchemaName"
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Validate "CloudSQL PostgreSQL2" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn"
+ Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery"
+ Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName"
+ Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName"
+ Then Run the preview of pipeline with runtime arguments
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn"
+ Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery"
+ Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName"
+ Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName"
+ Then Run the Pipeline in Runtime with runtime arguments
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate the values of records transferred to target table is equal to the values from source table
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST
+ Scenario: To verify pipeline preview fails when invalid connection details are provided using macro arguments
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername"
+ Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2"
+ Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername"
+ Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Replace input plugin property: "tableName" with value: "targetTable"
+ Then Replace input plugin property: "dbSchemaName" with value: "schema"
+ Then Validate "CloudSQL PostgreSQL2" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Enter runtime argument value "invalidDriverName" for key "cloudSQLPostgreSQLDriverName"
+ Then Enter runtime argument value "invalidUserName" for key "cloudSQLPostgreSQLUsername"
+ Then Enter runtime argument value "invalidPassword" for key "cloudSQLPostgreSQLPassword"
+ Then Run the preview of pipeline with runtime arguments
+ Then Verify the preview of pipeline is "Failed"
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST
+ Scenario: To verify pipeline preview fails when invalid basic details are provided using macro arguments
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLInvalidImportQuery"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostgreSQLTableName"
+ Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostgreSQLSchemaName"
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "targetRef"
+ Then Validate "CloudSQL PostgreSQL2" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Enter runtime argument value "invalidTableNameImportQuery" for key "cloudSQLPostgreSQLInvalidImportQuery"
+ Then Enter runtime argument value "invalidTable" for key "cloudSQLPostgreSQLTableName"
+ Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName"
+ Then Run the preview of pipeline with runtime arguments
+ Then Verify the preview of pipeline is "Failed"
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST @PLUGIN-1526
+ Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink using macro arguments in connection section
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername"
+ Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId"
+ Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId"
+ Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
+ Then Click on the Macro button of Property: "table" and set the value to: "bqTable"
+ Then Click on the Macro button of Property: "truncateTableMacroInput" and set the value to: "bqTruncateTable"
+ Then Click on the Macro button of Property: "updateTableSchemaMacroInput" and set the value to: "bqUpdateTableSchema"
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName"
+ Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername"
+ Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword"
+ Then Enter runtime argument value "projectId" for key "bqProjectId"
+ Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Enter runtime argument value "bqTargetTable" for key "bqTable"
+ Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable"
+ Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema"
+ Then Run the preview of pipeline with runtime arguments
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName"
+ Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername"
+ Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword"
+ Then Enter runtime argument value "projectId" for key "bqProjectId"
+ Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Enter runtime argument value "bqTargetTable" for key "bqTable"
+ Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable"
+ Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema"
+ Then Run the Pipeline in Runtime with runtime arguments
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate the values of records transferred to target Big Query table is equal to the values from source table
+
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST @PLUGIN-1526
+ Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink using macro arguments in basic section
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery" from the plugins list as: "Sink"
+ Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+ Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+ Then Select radio button plugin property: "instanceType" with value: "public"
+ Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+ Then Enter input plugin property: "referenceName" with value: "sourceRef"
+ Then Replace input plugin property: "database" with value: "databaseName"
+ Then Click on the Macro button of Property: "splitBy" and set the value to: "cloudSQLPostgreSQLSplitByColumn"
+ Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLImportQuery"
+ Then Validate "CloudSQL PostgreSQL" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId"
+ Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId"
+ Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
+ Then Click on the Macro button of Property: "table" and set the value to: "bqTable"
+ Then Click on the Macro button of Property: "truncateTableMacroInput" and set the value to: "bqTruncateTable"
+ Then Click on the Macro button of Property: "updateTableSchemaMacroInput" and set the value to: "bqUpdateTableSchema"
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn"
+ Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery"
+ Then Enter runtime argument value "projectId" for key "bqProjectId"
+ Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Enter runtime argument value "bqTargetTable" for key "bqTable"
+ Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable"
+ Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema"
+ Then Run the preview of pipeline with runtime arguments
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn"
+ Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery"
+ Then Enter runtime argument value "projectId" for key "bqProjectId"
+ Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Enter runtime argument value "bqTargetTable" for key "bqTable"
+ Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable"
+ Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema"
+ Then Run the Pipeline in Runtime with runtime arguments
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate the values of records transferred to target Big Query table is equal to the values from source table
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/CloudSqlPostgreSqlClient.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/CloudSqlPostgreSqlClient.java
new file mode 100644
index 00000000..291e1103
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/CloudSqlPostgreSqlClient.java
@@ -0,0 +1,169 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin;
+
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import org.junit.Assert;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.TimeZone;
+
+/**
+ * CloudSQLPostgreSQL client.
+ */
+public class CloudSqlPostgreSqlClient {
+ public static Connection getCloudSqlConnection() throws ClassNotFoundException, SQLException {
+ Class.forName("org.postgresql.Driver");
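+ // Loading the driver class registers it with DriverManager.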
+ String database = PluginPropertyUtils.pluginProp("databaseName");
+ String instanceConnectionName = System.getenv("CLOUDSQL_POSTGRESQL_CONNECTION_NAME");
+ String username = System.getenv("CLOUDSQL_POSTGRESQL_USERNAME");
+ String password = System.getenv("CLOUDSQL_POSTGRESQL_PASSWORD");
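+ // The "URL" property is assumed to be a format template with four %s placeholders, in order: database, instance connection name, username and password.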
+ String jdbcUrl = String.format(PluginPropertyUtils.pluginProp("URL"), database, instanceConnectionName,
+ username, password);
+ return DriverManager.getConnection(jdbcUrl);
+ }
+
+ public static int countRecord(String table, String schema) throws SQLException, ClassNotFoundException {
+ String countQuery = "SELECT COUNT(*) as total FROM " + schema + "." + table;
+ try (Connection connection = getCloudSqlConnection();
+ Statement statement = connection.createStatement();
+ ResultSet rs = statement.executeQuery(countQuery)) {
+ int num = 0;
+ while (rs.next()) {
+ num = rs.getInt(1);
+ }
+ return num;
+ }
+ }
+
+ /**
+ * Compares the result sets of the source and target tables.
+ * @param sourceTable table at the source side
+ * @param targetTable table at the sink side
+ * @param schema schema containing both tables
+ * @return true if the values in source and target side are equal
+ */
+ public static boolean validateRecordValues(String sourceTable, String targetTable, String schema)
+ throws SQLException, ClassNotFoundException {
+ String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable;
+ String getTargetQuery = "SELECT * FROM " + schema + "." + targetTable;
+ try (Connection connection = getCloudSqlConnection()) {
+ connection.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
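+ // HOLD_CURSORS_OVER_COMMIT keeps both result sets open so they can be walked in lock step during comparison.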
+ try (Statement statement1 = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+ ResultSet.HOLD_CURSORS_OVER_COMMIT);
+ Statement statement2 = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+ ResultSet.HOLD_CURSORS_OVER_COMMIT);
+ ResultSet rsSource = statement1.executeQuery(getSourceQuery);
+ ResultSet rsTarget = statement2.executeQuery(getTargetQuery)) {
+ return compareResultSetData(rsSource, rsTarget);
+ }
+ }
+ }
+
+ /**
+ * Compares the result set data of the source and target tables.
+ * @param rsSource result set of the source table data
+ * @param rsTarget result set of the target table data
+ * @return true if rsSource matches rsTarget
+ */
+ public static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarget) throws SQLException {
+ ResultSetMetaData mdSource = rsSource.getMetaData();
+ ResultSetMetaData mdTarget = rsTarget.getMetaData();
+ int columnCountSource = mdSource.getColumnCount();
+ int columnCountTarget = mdTarget.getColumnCount();
+ Assert.assertEquals("Number of columns in source and target are not equal",
+ columnCountSource, columnCountTarget);
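+ // Walk both result sets in lock step and compare them column by column.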
+ while (rsSource.next() && rsTarget.next()) {
+ int currentColumnCount = 1;
+ while (currentColumnCount <= columnCountSource) {
+ int columnType = mdSource.getColumnType(currentColumnCount);
+ String columnName = mdSource.getColumnName(currentColumnCount);
+ if (columnType == Types.TIMESTAMP) {
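+ // Compare timestamps on a UTC, purely Gregorian calendar so the result does not depend on the JVM default time zone.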
+ GregorianCalendar gc = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
+ gc.setGregorianChange(new Date(Long.MIN_VALUE));
+ Timestamp sourceTS = rsSource.getTimestamp(currentColumnCount, gc);
+ Timestamp targetTS = rsTarget.getTimestamp(currentColumnCount, gc);
+ Assert.assertEquals(String.format("Different values found for column : %s", columnName), sourceTS, targetTS);
+ } else {
+ String sourceString = rsSource.getString(currentColumnCount);
+ String targetString = rsTarget.getString(currentColumnCount);
+ Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+ sourceString, targetString);
+ }
+ currentColumnCount++;
+ }
+ }
+ Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table",
+ rsSource.next());
+ Assert.assertFalse("Number of rows in Target table is greater than the number of rows in Source table",
+ rsTarget.next());
+ return true;
+ }
+
+ public static void createSourceTable(String sourceTable, String schema) throws SQLException, ClassNotFoundException {
+ try (Connection connection = getCloudSqlConnection();
+ Statement statement = connection.createStatement()) {
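+ // Column definitions and seed values are read from the e2e test property file.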
+ String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
+ String createSourceTableQuery = "CREATE TABLE " + schema + "." + sourceTable + " " + datatypesColumns;
+ statement.executeUpdate(createSourceTableQuery);
+
+ // Insert dummy data.
+ String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValues");
+ String datatypesColumnsList = PluginPropertyUtils.pluginProp("datatypesColumnsList");
+ statement.executeUpdate("INSERT INTO " + schema + "." + sourceTable + " " + datatypesColumnsList + " " +
+ datatypesValues);
+ }
+ }
+
+ public static void createTargetTable(String targetTable, String schema) throws SQLException, ClassNotFoundException {
+ try (Connection connection = getCloudSqlConnection();
+ Statement statement = connection.createStatement()) {
+ String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
+ String createTargetTableQuery = "CREATE TABLE " + schema + "." + targetTable + " " + datatypesColumns;
+ statement.executeUpdate(createTargetTableQuery);
+ }
+ }
+
+ public static void createTargetPostgresqlTable(String targetTable, String schema) throws SQLException,
+ ClassNotFoundException {
+ try (Connection connection = getCloudSqlConnection();
+ Statement statement = connection.createStatement()) {
+ String datatypesColumns = PluginPropertyUtils.pluginProp("bigQueryDatatypesColumns");
+ String createTargetTableQuery = "CREATE TABLE " + schema + "." + targetTable + " " + datatypesColumns;
+ statement.executeUpdate(createTargetTableQuery);
+ }
+ }
+
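+ // Drops the given tables from the schema, used to clean up after a test run.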
+ public static void dropTables(String[] tables, String schema) throws SQLException, ClassNotFoundException {
+ try (Connection connection = getCloudSqlConnection();
+ Statement statement = connection.createStatement()) {
+ for (String table : tables) {
+ String dropTableQuery = "Drop Table " + schema + "." + table;
+ statement.executeUpdate(dropTableQuery);
+ }
+ }
+ }
+}
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java
new file mode 100644
index 00000000..f761dc82
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java
@@ -0,0 +1,275 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudsqlpostgresql;
+
+import com.google.cloud.bigquery.TableResult;
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.plugin.CloudSqlPostgreSqlClient;
+import org.apache.spark.sql.types.Decimal;
+import org.junit.Assert;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.sql.*;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.time.*;
+import java.time.format.DateTimeFormatter;
+import java.util.*;
+import java.util.Date;
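+// The single-type java.util.Date import takes precedence over java.sql.Date from the java.sql wildcard import.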
+
+/**
+ * BQValidation.
+ */
+public class BQValidation {
+ static List BigQueryResponse = new ArrayList<>();
+ static List