From 92795f14e39de148533880f2f66208dd4c336a70 Mon Sep 17 00:00:00 2001 From: bharatgulati Date: Tue, 16 May 2023 12:40:48 +0530 Subject: [PATCH 1/5] Design Time Scenarios (Source and Sink) --- .../sink/DesignTime.feature | 76 +++++++ .../sink/DesignTimeWithMacros.feature | 81 +++++++ .../sink/DesignTimeWithValidation.feature | 197 ++++++++++++++++ .../source/DesignTime.feature | 35 +++ .../source/DesignTimeWithMacro.feature | 48 ++++ .../source/DesignTimeWithValidation.feature | 210 ++++++++++++++++++ .../source/RunTime.feature | 60 +++++ .../cloudsqlpostgresql/BQValidation.java | 81 +++++++ .../cloudsqlpostgresql/CloudSqlClient.java | 121 ++++++++++ .../runner/package-info.java | 20 ++ .../stepsdesign/CloudSql.java | 5 + .../common/stepsdesign/TestSetUpHooks.java | 88 ++++++++ .../common/stepsdesign/package-info.java | 20 ++ .../resources/errorMessage.properties | 20 ++ .../resources/pluginParameters.properties | 42 ++++ 15 files changed, 1104 insertions(+) create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlClient.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runner/package-info.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSql.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature new file mode 100644 index 00000000..cf282d90 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature @@ -0,0 +1,76 @@ +# Copyright © 2023 Cask Data, Inc. +## +## Licensed under the Apache License, Version 2.0 (the "License"); you may not +## use this file except in compliance with the License. 
You may obtain a copy of +## the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +## License for the specific language governing permissions and limitations under +# the License.. + +@CLOUDSQL +Feature: CLOUDSQL Sink - Design time scenarios + + @CLOUDSQL_SINK @CLOUDSQL_SINK_BASIC + Scenario: To verify CLOUDSQL sink plugin validation with connection and basic details for connectivity + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Replace input plugin property: "tableName" with value: "tableName" + Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + @CLOUDSQL_SINK @CLOUDSQL_CONNECTION_ARGUMENT + Scenario: To verify CLOUDSQL sink plugin validation with connection argument + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Replace input plugin property: "tableName" with value: "tableName" + Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + @CLOUDSQL_SINK @CLOUDSINK_REFERENCE + Scenario: To verify CLOUDSQL sink plugin validation with Advanced details for connectivity + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then 
Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Replace input plugin property: "tableName" with value: "tableName" + Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" + Then Select dropdown plugin property: "transactionIsolationLevel" with option value: "transactionLevel" + Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature new file mode 100644 index 00000000..0c0b70fe --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature @@ -0,0 +1,81 @@ +# Copyright © 2023 Cask Data, Inc. +## +## Licensed under the Apache License, Version 2.0 (the "License"); you may not +## use this file except in compliance with the License. You may obtain a copy of +## the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +## License for the specific language governing permissions and limitations under +# the License.. 
+ +@CLOUDSQL +Feature: CLOUDSQL Sink - Design time with Macros + + @CLOUDSQL_SINK @CLOUDSQL_SINK_BASIC + Scenario: To verify CLOUDSQL sink plugin validation with basic details for connectivity + Given Open Datafusion Project to configure pipeline + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" +# Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields +# Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Click on the Macro button of Property: "tableName" and set the value to: "newtable" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "schema" + Then Validate "CloudSQL PostgreSQL2" plugin properties + + @CLOUDSQL_SINK @CLOUDSQL_SINK_CONNECTION + Scenario: To verify CLOUDSQL sink plugin validation with connection details for connectivity + Given Open Datafusion Project to configure pipeline + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for 
Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Click on the Macro button of Property: "select-jdbcPluginName" and set the value in textarea: "abcd" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Click on the Macro button of Property: "user" and set the value to: "user" + Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Click on the Macro button of Property: "connectionArguments" and set the value to: "1,key" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Replace input plugin property: "tableName" with value: "tableName" + Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" + Then Validate "CloudSQL PostgreSQL2" plugin properties \ No newline at end of file diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature new file mode 100644 index 00000000..4c46081d --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature @@ -0,0 +1,197 @@ +# Copyright © 2023 Cask Data, Inc. +## +## Licensed under the Apache License, Version 2.0 (the "License"); you may not +## use this file except in compliance with the License. You may obtain a copy of +## the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +## License for the specific language governing permissions and limitations under +# the License.. 
+
+@CLOUDSQL
+Feature: CLOUDSQL Sink - Design time with Validation scenarios
+
+  @CLOUDSQL_SINK @CLOUDSQL_SINK_DATABASE_NAME
+  Scenario: To verify CLOUDSQL sink plugin validation with an invalid database name
+    Given Open Datafusion Project to configure pipeline
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Replace input plugin property: "database" with value: "test_automation_db"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "outputSchema"
+    Then Validate "CloudSQL PostgreSQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation"
+    Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Replace input plugin property: "tableName" with value: "tableName"
+    Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName"
+    Then Replace input plugin property: "database" with value: "invalidDatabase"
+    Then Click on the Validate button
+    Then Verify that the Plugin is displaying an error message: "invalidMessageDatabaseName" on the header
+
+  @CLOUDSQL_SINK @CLOUDSQL_SINK_CONNECTION_NAME
+  Scenario: To verify CLOUDSQL sink plugin validation with an invalid connection name
+    Given Open Datafusion Project to configure pipeline
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation"
+    Then 
Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "invalidConnectionName" + Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Replace input plugin property: "tableName" with value: "tableName" + Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" + Then Click on the Validate button + Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "invalidMessageConnectionName" + + @CLOUDSQL_SINK @CLOUDSQL_SINK_TABLE_NAME_FIELD + Scenario: To verify CLOUDSQL sink plugin validation with table name field + Given Open Datafusion Project to configure pipeline + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then 
Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation"
+    Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Replace input plugin property: "database" with value: "test_automation_db"
+    Then Replace input plugin property: "tableName" with value: "invalidTableName"
+    Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName"
+    Then Click on the Validate button
+    Then Verify that the Plugin is displaying an error message: "invalidMessageTableNameField" on the header
+
+  @CLOUDSQL_SINK @CLOUDSQL_SINK_REFERENCE_NAME
+  Scenario: To verify CLOUDSQL sink plugin validation with an invalid reference name
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName"
+    Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Replace input plugin property: "database" with value: "test_automation_db"
+    Then Replace input plugin property: "tableName" with value: "validTableName"
+    Then Enter input plugin property: "referenceName" with value: "wrongReferenceName"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "invalidReferenceName"
+
+  @CLOUDSQL_SINK @CLOUDSQL_SINK_USERNAME
+  Scenario: To verify CLOUDSQL sink plugin validation with an invalid username
+    Given Open Datafusion Project to configure pipeline
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Replace input plugin property: "database" with value: "test_automation_db"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: 
"outputSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "invalidUSERNAME" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Replace input plugin property: "tableName" with value: "tableName" + Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "invalidUSERNAME" on the header + + @CLOUDSQL_SINK @CLOUDSQL_SINK_BASIC + Scenario: To verify CLOUDSQL sink plugin validation with connection and basic details for connectivity + Given Open Datafusion Project to configure pipeline + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Replace input plugin property: "tableName" with value: "tableName" 
+    Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName"
+    Then Validate "CloudSQL PostgreSQL2" plugin properties
+    Then Close the Plugin Properties page
\ No newline at end of file
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature
new file mode 100644
index 00000000..da354a8a
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature
@@ -0,0 +1,35 @@
+# Copyright © 2023 Cask Data, Inc.
+##
+## Licensed under the Apache License, Version 2.0 (the "License"); you may not
+## use this file except in compliance with the License. You may obtain a copy of
+## the License at
+##
+## http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+## License for the specific language governing permissions and limitations under
+# the License..
+
+@CLOUDSQL
+Feature: CLOUDSQL Source - Design time scenarios
+
+  @CLOUDSQL_SOURCE_TEST
+  Scenario: Verify user is able to validate the plugin when configured for the Basic section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Replace input plugin property: "database" with value: "test_automation_db"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "outputSchema"
+    Then Validate "CloudSQL PostgreSQL" plugin properties
+    Then Close the Plugin Properties page
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature
new file mode 100644
index 00000000..ff935282
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature
@@ -0,0 +1,48 @@
+# Copyright © 2023 Cask Data, Inc.
+##
+## Licensed under the Apache License, Version 2.0 (the "License"); you may not
+## use this file except in compliance with the License. You may obtain a copy of
+## the License at
+##
+## http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +## License for the specific language governing permissions and limitations under +# the License.. + +@CLOUDSQL +Feature: CloudSQL-postgreSQL source - Verify CloudSQL-postgreSQL source plugin design time Macros scenarios + + Scenario: Verify user should be able to validate plugin with macros for Connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-postgresql" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Click on the Macro button of Property: "user" and set the value to: "user" + Then Click on the Macro button of Property: "password" and set the value to: "pass" + Then Click on the Macro button of Property: "connectionArguments" and set the value to: "1,key" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + Scenario: Verify user should be able to validate plugin with macros for Basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "test_automation_db" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "Select * from auto.newtable;" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page \ No newline at end of file diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature new file mode 100644 index 00000000..d780b45f --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature @@ -0,0 +1,210 @@ +# Copyright © 2023 Cask Data, Inc. +## +## Licensed under the Apache License, Version 2.0 (the "License"); you may not +## use this file except in compliance with the License. 
You may obtain a copy of +## the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +## License for the specific language governing permissions and limitations under +# the License.. + +@CLOUDSQL +Feature: CloudSQL-postgreSQL source - Verify CloudSQL-postgreSQL source plugin design time validation scenarios + + @CLOUDSQL_SOURCE_TEST @CLOUDSQL_DATABASE + Scenario: Verify Database field validation error message with invalid test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "null" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + + @CLOUDSQL_SOURCE_TEST @CLOUDSQL_IMPORTQUERY + Scenario: Verify ImportQuery Field validation error message with invalid test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "wrongImportQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + + @CLOUDSQL_SOURCE_TEST @CLOUDSQL_REFERENCENAME + Scenario: Verify Reference Name field validation error message with invalid test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" 
+ Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "wrongReferenceName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + + @CLOUDSQL_SOURCE_TEST @CLOUDSQL_USERNAME + Scenario: Verify UserName field validation error message with invalid test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "wrongUserName" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + + @CLOUDSQL_SOURCE_TEST @CLOUDSQL_PASSWORD + Scenario: Verify Password field validation error message with invalid test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "wrongPassword" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" + Then 
Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + + + @CLOUDSQL_SOURCE_TEST @CLOUDSQL_SIZE + Scenario: Verify fetch size field validation error message with invalid test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" + Then Replace input plugin property: "fetchSize" with value: "zeroValue" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + + @CLOUDSQL_SOURCE_TEST @CLOUDSQL_SPLIT + Scenario: To Verify the Split-By field validation error message when number of Split value is changed to zero + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" + Then Replace input plugin property: "fetchSize" with value: "fetchSize" + Then Replace input plugin property: "numSplits" with value: "invalidSplitNumber" + Then Click on the Validate button + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMesagesNumberOfSplit" + Then Verify that the Plugin Property: "splitBy" is displaying an in-line error message: "errorMessagesSplitColumn" + Then Verify that the Plugin Property: "boundingQuery" is displaying an in-line error message: "errorMessagesBoundingQuery" + Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessagesimportQuery" + + @CLOUDSQL_SOURCE_TEST @CLOUDSQL_SPLIT + Scenario: To Verify the Split-By field validation error 
message when number of Split value is not a number + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" + Then Replace input plugin property: "fetchSize" with value: "fetchSize" + Then Replace input plugin property: "numSplits" with value: "invalidSplit" + Then Click on the Validate button + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageNumberOfSplitWONumber" + + @CLOUDSQL_SOURCE_TEST @CLOUDSQL_BOUNDING_QUERY + Scenario: Verify the Bounding Query validation error when Split-By and Number of Splits values are not provided + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" + Then Replace input plugin property: "fetchSize" with value: "fetchSize" + Then Replace input plugin property: "splitBy" with value: "splitBy" + Then Replace input plugin property: "numSplits" with value: "blankSplit" + Then Click on the Validate button + Then Verify that the Plugin Property: "boundingQuery" is displaying an in-line error message: "errorMessagesBoundingQuery" + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMesagesNumberOfSplit" + Then Verify that the Plugin Property: "splitBy" is displaying an in-line error message: "errorMessagesSplitColumn" + + @CLOUDSQL_SOURCE_TEST @CLOUDSQL_GETSCHEMA + Scenario: Verify user is able to click on GetSchema button and schema is retrieved after providing the basic connection property details + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: 
"CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" + Then Click on the Get Schema button + + + + + diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature new file mode 100644 index 00000000..06dc2fdb --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature @@ -0,0 +1,60 @@ +# Copyright © 2023 Cask Data, Inc. +## +## Licensed under the Apache License, Version 2.0 (the "License"); you may not +## use this file except in compliance with the License. You may obtain a copy of +## the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +## License for the specific language governing permissions and limitations under +# the License.. 
+
+@CLOUDSQL
+Feature: CLOUDSQL Source - Run Time scenarios
+
+  Scenario: Verify user should be able to preview the pipeline when plugin is configured for fetching table details
+    within the database using a CloudSQL PostgreSQL source and a BigQuery sink
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Replace input plugin property: "database" with value: "test_automation_db"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "outputSchema"
+    Then Validate "CloudSQL PostgreSQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Expand Plugin group in the LHS plugins list: "Sink"
+    Then Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate OUT record count is equal to records transferred to target BigQuery table
+    Then Validate the values of records transferred to target BigQuery table are equal to the values from source table
\ No newline at end of file
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java
new file mode 100644
index 00000000..ea99bcc8
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java
new file mode 100644
index 00000000..ea99bcc8
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.cloudsqlpostgresql;
+
+import com.simba.googlebigquery.jdbc.DataSource;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+/**
+ * BigQuery validation helpers that compare CloudSQL PostgreSQL data with BigQuery data.
+ */
+public class BQValidation {
+
+  /**
+   * Compares the entire contents of the source and target tables.
+   * @param schema schema of the CloudSQL PostgreSQL table
+   * @param sourceTable table on the CloudSQL PostgreSQL side
+   * @param targetTable table on the BigQuery side
+   * @return true if the values on the source and target side are equal
+   */
+  public static boolean validateBQAndDBRecordValues(String schema, String sourceTable, String targetTable)
+    throws SQLException, ClassNotFoundException, InterruptedException {
+    String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable;
+    try (Connection connect = CloudSqlClient.getCloudSqlConnection()) {
+      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      ResultSet rsSource = statement1.executeQuery(getSourceQuery);
+      ResultSet rsTarget = getBigQueryDataAsResultSet(targetTable);
+      return CloudSqlClient.compareResultSetData(rsSource, rsTarget);
+    }
+  }
+
+  /**
+   * Fetches the target BigQuery table through the Simba BigQuery JDBC driver. The connection is
+   * intentionally left open so that the returned ResultSet can still be consumed by the caller.
+   */
+  public static ResultSet getBigQueryDataAsResultSet(String targetTable) throws SQLException {
+    DataSource dataSource = new DataSource();
+    String projectId = PluginPropertyUtils.pluginProp("projectId");
+    String datasetId = PluginPropertyUtils.pluginProp("dataset");
+    String jdbcUrl = String.format(PluginPropertyUtils.pluginProp("jdbcUrl"), projectId);
+    dataSource.setURL(jdbcUrl);
+    Connection connection = dataSource.getConnection();
+    Statement statement = connection.createStatement();
+    return statement.executeQuery("SELECT * FROM " + datasetId + "." + targetTable + ";");
+  }
+
+  /**
+   * Compares a BigQuery source table with a CloudSQL PostgreSQL target table (the reverse direction).
+   */
+  public static boolean validateBqToDBTarget(String schema, String sourceTable, String targetTable)
+    throws SQLException, ClassNotFoundException, InterruptedException {
+    String getTargetQuery = "SELECT * FROM " + schema + "." + targetTable;
+    try (Connection connect = CloudSqlClient.getCloudSqlConnection()) {
+      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      ResultSet rsTarget = statement1.executeQuery(getTargetQuery);
+      ResultSet rsSource = getBigQueryDataAsResultSet(sourceTable);
+      return CloudSqlClient.compareResultSetData(rsSource, rsTarget);
+    }
+  }
+}
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlClient.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlClient.java
new file mode 100644
index 00000000..accc2781
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlClient.java
@@ -0,0 +1,121 @@
+package io.cdap.plugin.cloudsqlpostgresql;
+
+import io.cdap.e2e.pages.actions.CdfPipelineRunAction;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cucumber.java.en.Then;
+import org.junit.Assert;
+import stepsdesign.BeforeActions;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+/**
+ * CloudSQL PostgreSQL client helpers and record-count validation steps.
+ */
+public class CloudSqlClient {
+
+  // Quick manual connectivity check; not used by the test runners.
+  public static void main(String[] args) throws ClassNotFoundException, SQLException {
+    getCloudSqlConnection();
+  }
+
+  public static Connection getCloudSqlConnection() throws ClassNotFoundException, SQLException {
+    Class.forName("org.postgresql.Driver");
+    // Connection details come from pluginParameters.properties rather than being hardcoded here.
+    String instanceConnectionName = PluginPropertyUtils.pluginProp("connectionName");
+    String databaseName = PluginPropertyUtils.pluginProp("databaseName");
+    String username = PluginPropertyUtils.pluginProp("username");
+    String password = PluginPropertyUtils.pluginProp("password");
+    String jdbcUrl = String.format(
+      "jdbc:postgresql://google/%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.postgres.SocketFactory&user=%s&password=%s",
+      databaseName, instanceConnectionName, username, password);
+    Connection conn = DriverManager.getConnection(jdbcUrl);
+    System.out.println("Connected to the database successfully");
+    return conn;
+  }
+
+//  public static int countRecord(String table, String schema) throws SQLException, ClassNotFoundException {
+//    String countQuery = "SELECT COUNT(*) as total FROM " + schema + "." + table;
+//    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement();
+//         ResultSet rs = statement.executeQuery(countQuery)) {
+//      int num = 0;
+//      while (rs.next()) {
+//        num = (rs.getInt(1));
+//      }
+//      return num;
+//    }
+//  }
+
+  public static boolean validateRecordValues(String schema, String sourceTable, String targetTable)
+    throws ClassNotFoundException, SQLException {
+    String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable;
+    String getTargetQuery = "SELECT * FROM " + schema + "." + targetTable;
+    try (Connection connect = getCloudSqlConnection()) {
+      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement2 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      ResultSet rsSource = statement1.executeQuery(getSourceQuery);
+      ResultSet rsTarget = statement2.executeQuery(getTargetQuery);
+      return compareResultSetData(rsSource, rsTarget);
+    }
+  }
+
+  // TODO: placeholder that reports a mismatch for every comparison until a real implementation lands.
+  static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarget) throws SQLException {
+    return false;
+  }
+
+  // TODO: the table setup/teardown helpers below are placeholders; their DDL is filled in by a later commit.
+  public static void createTargetTable(String targetTable, String schema) {
+  }
+
+  public static void createSourceDatatypesTable(String sourceTable, String schema) {
+  }
+
+  public static void createTargetDatatypesTable(String targetTable, String schema) {
+  }
+
+  public static void createSourceLongTable(String sourceTable, String schema) {
+  }
+
+  public static void createTargetLongTable(String targetTable, String schema) {
+  }
+
+  public static void createSourceLongRawTable(String sourceTable, String schema) {
+  }
+
+  public static void createTargetLongRawTable(String targetTable, String schema) {
+  }
+
+  public static void createSourceLongVarcharTable(String sourceTable, String schema) {
+  }
+
+  public static void createTargetLongVarCharTable(String targetTable, String schema) {
+  }
+
+  public static void deleteTables(String schema, String[] strings) {
+  }
+
+  @Then("Validate OUT record count is equal to records transferred to target BigQuery table")
+  public void validateOUTRecordCountIsEqualToRecordsTransferredToTargetBigQueryTable()
+    throws IOException, InterruptedException {
+    int targetBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqTargetTable"));
+    BeforeActions.scenario.write("No of Records Transferred to BigQuery: " + targetBQRecordsCount);
+    Assert.assertEquals("Out records should match with target BigQuery table records count",
+                        CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), targetBQRecordsCount);
+  }
+
+  @Then("Validate the values of records transferred to target Big Query table is equal to the values from source table")
+  public void validateTheValuesOfRecordsTransferredToTargetBigQueryTableIsEqualToTheValuesFromSourceTable()
+    throws IOException, InterruptedException, SQLException, ClassNotFoundException {
+    int targetBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqTargetTable"));
+    BeforeActions.scenario.write("No of Records Transferred to BigQuery: " + targetBQRecordsCount);
+    Assert.assertEquals("Out records should match with target BigQuery table records count",
+                        CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), targetBQRecordsCount);
+
+    boolean recordsMatched = BQValidation.validateBQAndDBRecordValues(PluginPropertyUtils.pluginProp("schema"),
+                                                                      PluginPropertyUtils.pluginProp("sourceTable"),
+                                                                      PluginPropertyUtils.pluginProp("bqTargetTable"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value "
+                        + "of the records in the source table", recordsMatched);
+  }
+}
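In this patch compareResultSetData is a stub that returns false, so every value comparison would report a mismatch. A minimal lockstep comparison — a sketch only, assuming both queries return the same columns in the same order and with the same row ordering, which the surrounding code does not yet enforce — could look like this:

  static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarget) throws SQLException {
    java.sql.ResultSetMetaData sourceMetadata = rsSource.getMetaData();
    int columnCount = sourceMetadata.getColumnCount();
    if (columnCount != rsTarget.getMetaData().getColumnCount()) {
      return false; // Differently shaped results can never match.
    }
    boolean sourceHasRow = rsSource.next();
    boolean targetHasRow = rsTarget.next();
    while (sourceHasRow && targetHasRow) {
      for (int i = 1; i <= columnCount; i++) {
        // getString gives a uniform textual view of each value; a production comparison
        // would switch on the JDBC column type instead.
        if (!java.util.Objects.equals(rsSource.getString(i), rsTarget.getString(i))) {
          return false;
        }
      }
      sourceHasRow = rsSource.next();
      targetHasRow = rsTarget.next();
    }
    return !sourceHasRow && !targetHasRow; // Equal only if both sides are exhausted together.
  }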
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runner/package-info.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runner/package-info.java
new file mode 100644
index 00000000..cf665579
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runner/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/**
+ * Package contains the runners for CloudSQL PostgreSQL features.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.runner;
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSql.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSql.java
new file mode 100644
index 00000000..a3906d82
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSql.java
@@ -0,0 +1,5 @@
+package io.cdap.plugin.cloudsqlpostgresql.stepsdesign;
+
+/**
+ * Placeholder for CloudSQL PostgreSQL step designs; step definitions land in a later commit.
+ */
+public class CloudSql {
+
+}
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java
new file mode 100644
index 00000000..4641f34b
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.common.stepsdesign;
+
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.plugin.cloudsqlpostgresql.CloudSqlClient;
+import io.cucumber.java.After;
+import io.cucumber.java.Before;
+import org.apache.commons.lang3.RandomStringUtils;
+
+/**
+ * CloudSQL PostgreSQL test hooks.
+ */
+public class TestSetUpHooks {
+
+  @Before(order = 1)
+  public static void setTableName() {
+    String randomString = RandomStringUtils.randomAlphabetic(10).toUpperCase();
+    String sourceTableName = String.format("SOURCETABLE_%s", randomString);
+    String targetTableName = String.format("TARGETTABLE_%s", randomString);
+    PluginPropertyUtils.addPluginProp("sourceTable", sourceTableName);
+    PluginPropertyUtils.addPluginProp("targetTable", targetTableName);
+    String schema = PluginPropertyUtils.pluginProp("schema");
+    PluginPropertyUtils.addPluginProp("selectQuery", String.format("Select * from %s.newtable;", schema));
+  }
+
+//  @Before(order = 2, value = "@CLOUDSQL_SOURCE_TEST")
+//  public static void createTables() {
+//    CloudSqlClient.createSourceTable(PluginPropertyUtils.pluginProp("sourceTable"),
+//                                     PluginPropertyUtils.pluginProp("schema"));
+//    CloudSqlClient.createTargetTable(PluginPropertyUtils.pluginProp("targetTable"),
+//                                     PluginPropertyUtils.pluginProp("schema"));
+//  }
+
+  @Before(order = 2, value = "@CLOUDSQL_SOURCE_DATATYPES_TEST")
+  public static void createAllDatatypesTables() {
+    CloudSqlClient.createSourceDatatypesTable(PluginPropertyUtils.pluginProp("sourceTable"),
+                                              PluginPropertyUtils.pluginProp("schema"));
+    CloudSqlClient.createTargetDatatypesTable(PluginPropertyUtils.pluginProp("targetTable"),
+                                              PluginPropertyUtils.pluginProp("schema"));
+  }
+
+  @Before(order = 2, value = "@CLOUDSQL_SOURCE_DATATYPES_TEST2")
+  public static void createDatatypesTablesLong() {
+    CloudSqlClient.createSourceLongTable(PluginPropertyUtils.pluginProp("sourceTable"),
+                                         PluginPropertyUtils.pluginProp("schema"));
+    CloudSqlClient.createTargetLongTable(PluginPropertyUtils.pluginProp("targetTable"),
+                                         PluginPropertyUtils.pluginProp("schema"));
+  }
+
+  @Before(order = 2, value = "@CLOUDSQL_SOURCE_LONGRAW_TEST")
+  public static void createDatatypesTablesLongRaw() {
+    CloudSqlClient.createSourceLongRawTable(PluginPropertyUtils.pluginProp("sourceTable"),
+                                            PluginPropertyUtils.pluginProp("schema"));
+    CloudSqlClient.createTargetLongRawTable(PluginPropertyUtils.pluginProp("targetTable"),
+                                            PluginPropertyUtils.pluginProp("schema"));
+  }
+
+  @Before(order = 2, value = "@CLOUDSQL_SOURCE_DATATYPES_TEST4")
+  public static void createLongVarcharTables() {
+    CloudSqlClient.createSourceLongVarcharTable(PluginPropertyUtils.pluginProp("sourceTable"),
+                                                PluginPropertyUtils.pluginProp("schema"));
+    CloudSqlClient.createTargetLongVarCharTable(PluginPropertyUtils.pluginProp("targetTable"),
+                                                PluginPropertyUtils.pluginProp("schema"));
+  }
+
+  @After(order = 1, value = "@CLOUDSQL_SINK_TEST")
+  public static void dropTables() {
+    CloudSqlClient.deleteTables(PluginPropertyUtils.pluginProp("schema"),
+                                new String[]{PluginPropertyUtils.pluginProp("sourceTable"),
+                                             PluginPropertyUtils.pluginProp("targetTable")});
+  }
+}
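The createSource*/createTarget*/deleteTables helpers these hooks call are still empty stubs in CloudSqlClient. A minimal sketch of one create/drop pair, using a made-up two-column layout purely for illustration (the real DDL is not part of this patch), might be:

  public static void createTargetTable(String targetTable, String schema)
    throws SQLException, ClassNotFoundException {
    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) {
      // Hypothetical layout; the real tests create type-specific tables.
      statement.executeUpdate("CREATE TABLE " + schema + "." + targetTable
                                + " (id INT PRIMARY KEY, name VARCHAR(100))");
    }
  }

  public static void deleteTables(String schema, String[] tables)
    throws SQLException, ClassNotFoundException {
    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) {
      for (String table : tables) {
        statement.executeUpdate("DROP TABLE IF EXISTS " + schema + "." + table);
      }
    }
  }

Note that the stubs in this patch declare no checked exceptions, so adopting a sketch like this would also mean widening their signatures and having the hooks above declare or handle SQLException and ClassNotFoundException.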
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java
new file mode 100644
index 00000000..63f8efab
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/**
+ * Package contains the step designs for common features.
+ */
+package io.cdap.plugin.common.stepsdesign;
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties
new file mode 100644
index 00000000..41327888
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties
@@ -0,0 +1,20 @@
+validationSuccessMessage=No errors found.
+#connectionName=Connection Name must be in the format <PROJECT_ID>:<REGION>:<INSTANCE_NAME> to connect to a public CloudSQL PostgreSQL instance.
+name=SQL error while getting query schema: FATAL: password authentication failed for user "f"
+password=SQL error while getting query schema: FATAL: password authentication failed for user "v"
+referenceName=Invalid reference name '2942hd+'. Supported characters are: letters, numbers, and '_', '-', '.', or '$'.
+databaseName=SQL error while getting query schema: FATAL: database "wdkdcw" does not exist
+NumberOfSplit=Invalid value for Number of Splits '0'. Must be at least 1. Specify a Number of Splits no less than 1.
+fetchSize=Invalid fetch size. Fetch size must be a positive integer.
+fetechSizeBlank=Invalid fetch size. Fetch size must be a positive integer.
+errorMessagesBoundingQuery=Bounding Query must be specified if Number of Splits is not set to 1. Specify the Bounding Query.
+errorMesagesNumberOfSplit=Split-By Field Name must be specified if Number of Splits is not set to 1. Specify the Split-by Field Name.
+errorMessagesSplitColumn=Split-By Field Name must be specified if Number of Splits is not set to 1. Specify the Split-by Field Name.
+errorMessageNumberOfSplitWONumber=Unable to create config for batchsource CloudSQLPostgreSQL 'numSplits' is invalid:
+errorMessageImportQuery=Import Query "Select * from auto.newtable;" must contain the string '$CONDITIONS'. if Number of Splits is not set to 1. Include '$CONDITIONS' in the Import Query
+
+invalidMessageConnectionName=Connection Name must be in the format <PROJECT_ID>:<REGION>:<INSTANCE_NAME> to connect to a public CloudSQL PostgreSQL instance.
+invalidMessageDatabaseName=Exception while trying to validate schema of database table 'auto."newtable"' for connection
+invalidMessageTableNameField=Exception while trying to validate schema of database table 'auto."wrong"'
+invalidReferenceName=Invalid reference name '93bhed92@13#$'. Supported characters are: letters, numbers, and '_', '-', '.', or '$'.
+invalidUSERNAME=Exception while trying to validate schema of database table 'auto."newtable"' for connection
\ No newline at end of file
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties
new file mode 100644
index 00000000..9aa1a056
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties
@@ -0,0 +1,42 @@
+driverName=cloudsql-postgresql
+host=MYSQL_HOST
+username=v
+password=v@123
+databaseName=test_automation_db
+zeroValue=0
+invalidSplit=abc
+invalidSplitNumber=0
+wrongImportQuery="Select * from auto_updatable;
+rightImportQuery=Select * from auto.newtable;
+wrongReferenceName=93bhed92@13#$
+wrongUserName=null
+wrongPassword=null
+fetchSize=1000
+numberOfSplit=
+splitBy=
+blankSplit=
+port=MYSQL_PORT
+sourceRef=source
+targetRef=target
+outputSchema=[{"key":"col1","value":"int"},{"key":"col2","value":"string"},{"key":"col3","value":"string"},\
+  {"key":"col4","value":"string"},{"key":"col5","value":"string"}]
+user=user
+pass=password
+conn=conn
+connectionArgument=1,new
+connectionName=cdf-athena:europe-west1:cloud-postgresql-automation
+connectionArgumentsList=[{"key":"queryTimeout","value":"-1"}]
+tableName=newtable
+validTableName=newtable
+invalidTableName=wrong
+dbSchemaName=auto
+schema=auto
+transactionLevel=TRANSACTION_READ_COMMITTED
+connectionTimeout=10
+invalidConnectionName=abd3
+invalidDatabase=2nha
+invalidUSERNAME=incorrect
+macrosValueTable=newtable
+#bq properties
+projectId=cdf-athena
+dataset=test_automation
+BigQuery=sharedStyled__PluginNameContainer-cnOUhe dbXgBp

From 109283bf9981497c8ffb18cfbcff1879a285e8aa Mon Sep 17 00:00:00 2001
From: suryakumari
Date: Wed, 24 May 2023 17:00:27 +0530
Subject: [PATCH 2/5] e2e_CloudsqlPostgresql scenarios

---
 cloudsql-postgresql-plugin/pom.xml            |   2 +-
 .../sink/DesignTime.feature                   | 118 +++----
 .../sink/DesignTimeWithMacros.feature         |  96 ++---
 .../sink/DesignTimeWithValidation.feature     | 216 +++------
 .../cloudsql-postgresql/sink/RunTime.feature  | 146 ++++++++
 .../sink/RunTimeMacro.feature                 | 134 +++++++
 .../source/DesignTime.feature                 |  66 ++--
 .../source/DesignTimeWithMacro.feature        |  78 ++--
 .../source/DesignTimeWithValidation.feature   | 279 ++++++++-------
 .../source/RunTime.feature                    | 229 ++++++++++--
 .../source/RunTimeMacro.feature               | 334 ++++++++++++++++++
 .../cloudsqlpostgresql/BQValidation.java      | 162 ++++-----
 .../cloudsqlpostgresql/CloudSqlClient.java    | 121 -------
 .../CloudSqlPostgreSqlClient.java             | 179 ++++++++++
 .../runners/sinkrunner/TestRunner.java        |  36 ++
 .../sinkrunner/TestRunnerRequired.java        |  36 ++
 .../sinkrunner}/package-info.java             |   5 +-
 .../runners/sourcerunner/TestRunner.java      |  36 ++
 .../sourcerunner/TestRunnerRequired.java      |  36 ++
 .../runners/sourcerunner/package-info.java    |  19 +
 .../stepsdesign/CloudSql.java                 |   5 -
 .../stepsdesign/CloudSqlPostgreSql.java       |  71 ++++
 .../common/stepsdesign/TestSetUpHooks.java    | 149 +++++---
 .../resources/errorMessage.properties         |  41 ++-
 .../pluginDataCyAttributes.properties         |  18 +
 .../resources/pluginParameters.properties     | 133 +++++--
 .../BigQuery/BigQueryCreateTableQuery.txt     |   2 +
 .../BigQuery/BigQueryInsertDataQuery.txt      |   5 +
 28 files changed, 1973 insertions(+), 779 deletions(-)
 create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature
 create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature
 create mode 100644
cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature delete mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlClient.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunner.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunnerRequired.java rename cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/{runner => runners/sinkrunner}/package-info.java (82%) create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunner.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunnerRequired.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/package-info.java delete mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSql.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableQuery.txt create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataQuery.txt diff --git a/cloudsql-postgresql-plugin/pom.xml b/cloudsql-postgresql-plugin/pom.xml index e8736f8b..2624792c 100644 --- a/cloudsql-postgresql-plugin/pom.xml +++ b/cloudsql-postgresql-plugin/pom.xml @@ -50,7 +50,7 @@ com.google.guava guava - 23.0 + 31.0.1-jre diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature index cf282d90..cfad6724 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature @@ -1,76 +1,70 @@ +# # Copyright © 2023 Cask Data, Inc. -## -## Licensed under the Apache License, Version 2.0 (the "License"); you may not -## use this file except in compliance with the License. You may obtain a copy of -## the License at -## -## http://www.apache.org/licenses/LICENSE-2.0 -## -## Unless required by applicable law or agreed to in writing, software -## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -## License for the specific language governing permissions and limitations under -# the License.. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations under +# the License. +# -@CLOUDSQL -Feature: CLOUDSQL Sink - Design time scenarios +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL sink - Verify CloudSQL-PostgreSQL sink plugin design time scenarios - @CLOUDSQL_SINK @CLOUDSQL_SINK_BASIC - Scenario: To verify CLOUDSQL sink plugin validation with connection and basic details for connectivity + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with connection and basic details for connectivity Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Sink" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" Then Replace input plugin property: "connectionName" with value: "connectionName" - Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Replace input plugin property: "tableName" with value: "tableName" - Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" Then Validate "CloudSQL PostgreSQL" plugin properties Then Close the Plugin Properties page - @CLOUDSQL_SINK @CLOUDSQL_CONNECTION_ARGUMENT - Scenario: To verify CLOUDSQL sink plugin validation with connection argument - Given Open Datafusion Project to configure pipeline - When Expand Plugin group in the LHS plugins list: "Sink" - When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" - Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" - Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "connectionName" - Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields - Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Replace input plugin property: "tableName" with value: "tableName" - Then Replace input plugin property: "dbSchemaName" with 
value: "dbSchemaName" - Then Validate "CloudSQL PostgreSQL" plugin properties - Then Close the Plugin Properties page - - @CLOUDSQL_SINK @CLOUDSINK_REFERENCE - Scenario: To verify CLOUDSQL sink plugin validation with Advanced details for connectivity - Given Open Datafusion Project to configure pipeline - When Expand Plugin group in the LHS plugins list: "Sink" - When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" - Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" - Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "connectionName" - Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields - Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Replace input plugin property: "tableName" with value: "tableName" - Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" - Then Select dropdown plugin property: "transactionIsolationLevel" with option value: "transactionLevel" - Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout" - Then Validate "CloudSQL PostgreSQL" plugin properties - Then Close the Plugin Properties page - + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with connection arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with advanced details with connection timeout + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with 
option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature index 0c0b70fe..772a5cfc 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature @@ -1,81 +1,53 @@ +# # Copyright © 2023 Cask Data, Inc. -## -## Licensed under the Apache License, Version 2.0 (the "License"); you may not -## use this file except in compliance with the License. You may obtain a copy of -## the License at -## -## http://www.apache.org/licenses/LICENSE-2.0 -## -## Unless required by applicable law or agreed to in writing, software -## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -## License for the specific language governing permissions and limitations under -# the License.. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+# -@CLOUDSQL -Feature: CLOUDSQL Sink - Design time with Macros +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL sink- Verify CloudSQL-PostgreSQL sink plugin design time macro scenarios - @CLOUDSQL_SINK @CLOUDSQL_SINK_BASIC - Scenario: To verify CLOUDSQL sink plugin validation with basic details for connectivity + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with macro enabled fields for connection section Given Open Datafusion Project to configure pipeline - When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" When Expand Plugin group in the LHS plugins list: "Sink" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" - Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostGreSQLDriverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" - Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Enter textarea plugin property: "importQuery" with value: "selectQuery" - Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostGreSQLUser" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostGreSQLPassword" + Then Click on the Macro button of Property: "connectionArguments" and set the value to: "cloudSQLPostGreSQLConnectionArguments" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" Then Validate "CloudSQL PostgreSQL" plugin properties Then Close the Plugin Properties page - Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" - Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "connectionName" -# Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields -# Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Click on the Macro button of Property: "tableName" and set the value to: "newtable" - Then 
Click on the Macro button of Property: "dbSchemaName" and set the value to: "schema" - Then Validate "CloudSQL PostgreSQL2" plugin properties - @CLOUDSQL_SINK @CLOUDSQL_SINK_CONNECTION - Scenario: To verify CLOUDSQL sink plugin validation with connection details for connectivity + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with macro enabled fields for basic section Given Open Datafusion Project to configure pipeline - When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" When Expand Plugin group in the LHS plugins list: "Sink" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" - Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "connectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Enter textarea plugin property: "importQuery" with value: "selectQuery" - Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostGreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostGreSQLSchemaName" Then Validate "CloudSQL PostgreSQL" plugin properties Then Close the Plugin Properties page - Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" - Then Click on the Macro button of Property: "select-jdbcPluginName" and set the value in textarea: "abcd" - Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" - Then Click on the Macro button of Property: "user" and set the value to: "user" - Then Click on the Macro button of Property: "password" and set the value to: "password" - Then Click on the Macro button of Property: "connectionArguments" and set the value to: "1,key" - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Replace input plugin property: "tableName" with value: "tableName" - Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" - Then Validate "CloudSQL PostgreSQL2" plugin properties \ No newline at end of file diff --git 
a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature index 4c46081d..826a8e28 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature @@ -1,197 +1,143 @@ -# Copyright © 2023 Cask Data, Inc. -## -## Licensed under the Apache License, Version 2.0 (the "License"); you may not -## use this file except in compliance with the License. You may obtain a copy of -## the License at -## -## http://www.apache.org/licenses/LICENSE-2.0 -## -## Unless required by applicable law or agreed to in writing, software -## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -## License for the specific language governing permissions and limitations under -# the License.. +# +# Copyright © 2022 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# -@CLOUDSQL_SINK @CLOUDSQL_SINK_DATABASE_NAME -Feature: CLOUDSQL Sink - Design time with Validation scenarios +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL Sink - Verify CloudSQL-postgreSQL Sink Plugin Error scenarios - Scenario: To verify CLOUDSQL sink plugin validation with database name + Scenario:Verify CloudSQLPostgreSQL sink plugin validation errors for mandatory fields Given Open Datafusion Project to configure pipeline - When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" When Expand Plugin group in the LHS plugins list: "Sink" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" - Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Click on the Validate button + Then Verify mandatory property error for below listed properties: + | jdbcPluginName | + | referenceName | + | database | + | tableName | + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid reference test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with 
value: "connectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Enter textarea plugin property: "importQuery" with value: "selectQuery" - Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" - Then Validate "CloudSQL PostgreSQL" plugin properties - Then Close the Plugin Properties page - Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" - Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" - Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "tableName" with value: "tableName" - Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" - Then Replace input plugin property: "database" with value: "invalidDatabase" + Then Enter input plugin property: "referenceName" with value: "invalidRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" Then Click on the Validate button - Then Verify that the Plugin is displaying an error message: "invalidMessageDatabaseName" on the header + Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudPostgreSQLInvalidReferenceName" - @CLOUDSQL_SINK @CLOUDSQL_SINK_CONNECTION_NAME - Scenario: To verify CLOUDSQL sink plugin validation with connection name + Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid connection name test data Given Open Datafusion Project to configure pipeline - When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" When Expand Plugin group in the LHS plugins list: "Sink" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" - Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" - Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" - Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Enter textarea plugin property: 
"importQuery" with value: "selectQuery" - Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" - Then Validate "CloudSQL PostgreSQL" plugin properties - Then Close the Plugin Properties page - Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" Then Replace input plugin property: "connectionName" with value: "invalidConnectionName" - Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Replace input plugin property: "tableName" with value: "tableName" - Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" Then Click on the Validate button - Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "invalidMessageConnectionName" + Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "errorMessageConnectionName" - @CLOUDSQL_SINK @CLOUDSQL_SINK_TABLE_NAME_FIELD - Scenario: To verify CLOUDSQL sink plugin validation with table name field + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid database Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" When Expand Plugin group in the LHS plugins list: "Sink" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "connectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" + Then Replace input plugin property: 
"password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" Then Enter textarea plugin property: "importQuery" with value: "selectQuery" Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" Then Validate "CloudSQL PostgreSQL" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" - Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" - Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields - Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Replace input plugin property: "tableName" with value: "invalidTableName" - Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" - Then Click on the Validate button - Then Verify that the Plugin is displaying an error message: "invalidMessageTableNameField" on the header - - @CLOUDSQL_SINK @CLOUDSQL_SINK_REFERENCE_NAME - Scenario: To verify CLOUDSQL sink plugin validation with Advanced details for connectivity - Given Open Datafusion Project to configure pipeline - When Expand Plugin group in the LHS plugins list: "Sink" - When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" - Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" Then Replace input plugin property: "connectionName" with value: "connectionName" - Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Replace input plugin property: "tableName" with value: "validTableName" - Then Enter input plugin property: "referenceName" with value: "wrongReferenceName" + Then Replace input plugin property: "database" with value: "invalidDatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Enter input plugin property: "referenceName" with value: "targetRef" Then Click on the Validate button - Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "invalidReferenceName" + Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSinkDatabase" on the header - @@CLOUDSQL_SINK 
@CLOUDSQL_SINK_USERNAME - Scenario: To verify CLOUDSQL sink plugin validation with username + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid table name Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" When Expand Plugin group in the LHS plugins list: "Sink" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "connectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" Then Enter textarea plugin property: "importQuery" with value: "selectQuery" Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" Then Validate "CloudSQL PostgreSQL" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" - Then Replace input plugin property: "user" with value: "invalidUSERNAME" for Credentials and Authorization related fields + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Replace input plugin property: "tableName" with value: "tableName" - Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" + Then Enter input plugin property: "referenceName" with value: "targetRef" + 
Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "invalidTable" Then Click on the Validate button - Then Verify that the Plugin is displaying an error message: "invalidUSERNAME" on the header + Then Verify that the Plugin Property: "tableName" is displaying an in-line error message: "errorMessageInvalidTableName" - @CLOUDSQL_SINK @CLOUDSQL_SINK_BASIC - Scenario: To verify CLOUDSQL sink plugin validation with connection and basic details for connectivity + Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with blank username Given Open Datafusion Project to configure pipeline - When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" When Expand Plugin group in the LHS plugins list: "Sink" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" - Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" - Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" - Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Enter textarea plugin property: "importQuery" with value: "selectQuery" - Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" - Then Validate "CloudSQL PostgreSQL" plugin properties - Then Close the Plugin Properties page - Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" Then Replace input plugin property: "connectionName" with value: "connectionName" - Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Replace input plugin property: "tableName" with value: "tableName" - Then Replace input plugin property: "dbSchemaName" with value: "dbSchemaName" - Then Validate "CloudSQL PostgreSQL" plugin properties - Then Close the Plugin Properties page \ No newline at end of file + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Click on the Validate button + Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername" diff --git 
a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature new file mode 100644 index 00000000..55a5b078 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature @@ -0,0 +1,146 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL sink - Verify data transfer from BigQuery source to CloudSQL-PostgreSQL sink + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink successfully with supported datatypes + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for CloudSQLPostgreSQL sink + Then Close the preview data + Then Deploy the pipeline 
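+      # Note: "connectionName" here is a key resolved from
+      # src/e2e-test/resources/pluginParameters.properties (added in this patch). Cloud SQL
+      # instance connection names follow the <project-id>:<region>:<instance-id> form, so a
+      # hypothetical entry would be:
+      #   connectionName=my-project:us-central1:my-postgres-instance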
+ Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" +# Then Validate the values of records transferred to target PostGreSQL table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink successfully when connection arguments are set + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for CloudSQLPostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" +# Then Validate the values of records transferred to target PostGreSQL table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink with Advanced property Connection timeout + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS 
plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for CloudSQLPostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" +# Then Validate the values of records transferred to target PostGreSQL table is equal to the values from source BigQuery table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature new file mode 100644 index 00000000..df6df385 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature @@ -0,0 +1,134 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+# + +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL sink - Verify data transfer to PostgreSQL sink with macro arguments + + @BQ_SOURCE_TEST @POSTGRESQL_TEST_TABLE + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink using macro arguments in connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter 
runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs +# Then Validate the values of records transferred to target PostGreSQL table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @POSTGRESQL_TEST_TABLE + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostgreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostgreSQLSchemaName" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName" + Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in 
running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName" + Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs +# Then Validate the values of records transferred to target PostGreSQL table is equal to the values from source BigQuery table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature index da354a8a..db67ae17 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature @@ -1,35 +1,57 @@ +# # Copyright © 2023 Cask Data, Inc. -## -## Licensed under the Apache License, Version 2.0 (the "License"); you may not -## use this file except in compliance with the License. You may obtain a copy of -## the License at -## -## http://www.apache.org/licenses/LICENSE-2.0 -## -## Unless required by applicable law or agreed to in writing, software -## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -## License for the specific language governing permissions and limitations under -# the License.. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# -@CLOUDSQL -Feature: CLOUDSQL Source - Design time scenarios +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostgreSQL source - Verify CloudSQLPostgreSQL source plugin design time scenarios - @CLOUDSQL_SOURCE_TEST - Scenario: Verify user is able to validate the plugin when configured for basic Section. 
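+  # Note: the @CLOUDSQLPOSTGRESQL_SOURCE_TEST and @CLOUDSQLPOSTGRESQL_SINK_TEST tags used
+  # below are presumably bound to the Cucumber @Before/@After hooks in TestSetUpHooks.java
+  # (added in this patch), which would create and drop the test tables these scenarios query.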
+ @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify CloudSQLPostgreSQL source plugin validation with connection and basic details for connectivity Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "connectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" Then Enter textarea plugin property: "importQuery" with value: "selectQuery" Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify CloudSQLPostgreSQL source plugin validation with connection arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" Then Validate "CloudSQL PostgreSQL" plugin properties Then Close the Plugin Properties page diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature 
b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature index ff935282..11e5f016 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature @@ -1,48 +1,54 @@ +# # Copyright © 2023 Cask Data, Inc. -## -## Licensed under the Apache License, Version 2.0 (the "License"); you may not -## use this file except in compliance with the License. You may obtain a copy of -## the License at -## -## http://www.apache.org/licenses/LICENSE-2.0 -## -## Unless required by applicable law or agreed to in writing, software -## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -## License for the specific language governing permissions and limitations under -# the License.. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# -@CLOUDSQL -Feature: CloudSQL-postgreSQL source - Verify CloudSQL-postgreSQL source plugin design time Macros scenarios +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostgreSQL source - Verify CloudSQL-PostgreSQL source plugin design time macro scenarios - Scenario: Verify user should be able to validate plugin with macros for Connection section + Scenario: To verify CloudSQLPostgreSQL source plugin validation with macro enabled fields for connection section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-postgresql" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" Then Replace input plugin property: "connectionName" with value: "connectionName" - Then Click on the Macro button of Property: "user" and set the value to: "user" - Then Click on the Macro button of Property: "password" and set the value to: "pass" - Then Click on the Macro button of Property: "connectionArguments" and set the value to: "1,key" - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUser" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Click on the Macro button of Property: "connectionArguments" and set the value to: "cloudSQLPostgreSQLConnectionArguments" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: 
"referenceName" with value: "sourceRef" Then Enter textarea plugin property: "importQuery" with value: "selectQuery" Then Validate "CloudSQL PostgreSQL" plugin properties Then Close the Plugin Properties page - Scenario: Verify user should be able to validate plugin with macros for Basic section - Given Open Datafusion Project to configure pipeline - When Expand Plugin group in the LHS plugins list: "Source" - When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" - Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" - Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" - Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" - Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "Select * from auto.newtable;" - Then Validate "CloudSQL PostgreSQL" plugin properties - Then Close the Plugin Properties page \ No newline at end of file + Scenario: To verify CloudSQLPostgreSQL source plugin validation with macro enabled fields for basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "splitBy" and set the value to: "cloudSQLPostgreSQLSplitBy" + Then Click on the Macro button of Property: "fetchSize" and set the value to: "cloudSQLPostgreSQLFetchSize" + Then Click on the Macro button of Property: "boundingQuery" and set the value in textarea: "cloudSQLPostgreSQLBoundingQuery" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLImportQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature index d780b45f..e5ae6d0d 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature +++ 
b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature @@ -1,210 +1,229 @@ +# # Copyright © 2023 Cask Data, Inc. -## -## Licensed under the Apache License, Version 2.0 (the "License"); you may not -## use this file except in compliance with the License. You may obtain a copy of -## the License at -## -## http://www.apache.org/licenses/LICENSE-2.0 -## -## Unless required by applicable law or agreed to in writing, software -## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -## License for the specific language governing permissions and limitations under -# the License.. - -@CLOUDSQL +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required Feature: CloudSQL-postgreSQL source - Verify CloudSQL-postgreSQL source plugin design time validation scenarios - @CLOUDSQL_SOURCE_TEST @CLOUDSQL_DATABASE - Scenario: Verify Database field validation error message with invalid test data + Scenario: Verify CloudSQLPostgreSQL source plugin validation errors for mandatory fields + Given Open Datafusion Project to configure pipeline + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Validate button + Then Verify mandatory property error for below listed properties: + | jdbcPluginName | + | database | + | referenceName | + | importQuery | + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid reference test data Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "connectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "null" + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "invalidRef" + Then Replace input plugin property: 
"database" with value: "databaseName" Then Enter textarea plugin property: "importQuery" with value: "selectQuery" - Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" - Then Validate "CloudSQL PostgreSQL" plugin properties + Then Click on the Validate button + Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudPostgreSQLInvalidReferenceName" - @CLOUDSQL_SOURCE_TEST @CLOUDSQL_IMPORTQUERY - Scenario: Verify ImportQuery Field validation error message with invalid test data + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid connection name test data Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "invalidConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "databaseName" - Then Enter textarea plugin property: "importQuery" with value: "wrongImportQuery" - Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" - Then Validate "CloudSQL PostgreSQL" plugin properties + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "errorMessageConnectionName" - @CLOUDSQL_SOURCE_TEST @CLOUDSQL_REFERENCENAME - Scenario: Verify Reference Name field validation error message with invalid test data + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with blank bounding query Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "connectionName" Then Replace input plugin 
property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "wrongReferenceName" - Then Replace input plugin property: "database" with value: "databaseName" - Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" - Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" - Then Validate "CloudSQL PostgreSQL" plugin properties + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "invalidDatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery" + Then Replace input plugin property: "splitBy" with value: "splitBy" + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Click on the Validate button + Then Verify that the Plugin Property: "boundingQuery" is displaying an in-line error message: "errorMessageBoundingQuery" + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageBoundingQuery" + Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessageInvalidImportQuery" - @CLOUDSQL_SOURCE_TEST @CLOUDSQL_USERNAME - Scenario: Verify UserName field validation error message with invalid test data + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with number of splits without split by field name Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" - Then Replace input plugin property: "user" with value: "wrongUserName" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "databaseName" - Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" - Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" - Then Validate "CloudSQL PostgreSQL" plugin properties + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with 
value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Click on the Validate button + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageBlankSplitBy" + Then Verify that the Plugin Property: "splitBy" is displaying an in-line error message: "errorMessageBlankSplitBy" - @CLOUDSQL_SOURCE_TEST @CLOUDSQL_PASSWORD - Scenario: Verify Password field validation error message with invalid test data + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message when number of Split value is not a number Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "connectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "wrongPassword" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "databaseName" - Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" - Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" - Then Validate "CloudSQL PostgreSQL" plugin properties + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Replace input plugin property: "numSplits" with value: "zeroSplits" + Then Click on the Validate button + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageNumberOfSplitNotNumber" + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message when number of Split value is changed to zero + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization 
related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Replace input plugin property: "numSplits" with value: "zeroValue" + Then Click on the Validate button + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageInvalidNumberOfSplits" - @CLOUDSQL_SOURCE_TEST @CLOUDSQL_SIZE - Scenario: Verify fetch size field validation error message with invalid test data + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message when fetch size is changed to zero Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "connectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "databaseName" - Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" Then Replace input plugin property: "fetchSize" with value: "zeroValue" - Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" - Then Validate "CloudSQL PostgreSQL" plugin properties + Then Click on the Validate button + Then Verify that the Plugin Property: "fetchSize" is displaying an in-line error message: "errorMessageInvalidFetchSize" - @CLOUDSQL_SOURCE_TEST @CLOUDSQL_SPLIT - Scenario: To Verify the Split-By field validation error message when number of Split value is changed to zero + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid database Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: 
"cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "connectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "databaseName" - Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" - Then Replace input plugin property: "fetchSize" with value: "fetchSize" - Then Replace input plugin property: "numSplits" with value: "invalidSplitNumber" + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "invalidDatabase" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" Then Click on the Validate button - Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMesagesNumberOfSplit" - Then Verify that the Plugin Property: "splitBy" is displaying an in-line error message: "errorMessagesSplitColumn" - Then Verify that the Plugin Property: "boundingQuery" is displaying an in-line error message: "errorMessagesBoundingQuery" - Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessagesimportQuery" + Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSourceDatabase" on the header - @CLOUDSQL_SOURCE_TEST @CLOUDSQL_SPLIT - Scenario: To Verify the Split-By field validation error message when number of Split value is not a number + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid import query Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "connectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "databaseName" - Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" - Then Replace input plugin property: "fetchSize" with value: "fetchSize" - Then Replace input plugin property: "numSplits" with value: 
"invalidSplit" + Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery" + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" Then Click on the Validate button - Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageNumberOfSplitWONumber" + Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessageInvalidImportQuery" - @CLOUDSQL_SOURCE_TEST @CLOUDSQL_BOUNDING_QUERY - Scenario: Verify the Bounding Query validation error when Split-By and Number of Splits values are not provided + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with blank username Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" - Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "databaseName" - Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" - Then Replace input plugin property: "fetchSize" with value: "fetchSize" - Then Replace input plugin property: "splitBy" with value: "splitBy" - Then Replace input plugin property: "numSplits" with value: "blankSplit" + Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery" Then Click on the Validate button - Then Verify that the Plugin Property: "boundingQuery" is displaying an in-line error message: "errorMessagesBoundingQuery" - Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMesagesNumberOfSplit" - Then Verify that the Plugin Property: "splitBy" is displaying an in-line error message: "errorMessagesSplitColumn" + Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername" - @CLOUDSQL_SOURCE_TEST @CLOUDSQL_GETSCHEMA - Scenario: Verify user is able to click on GetSchema button and schema is retrieved after providing the basic connection property details + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with blank password Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: 
"select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "connectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "databaseName" - Then Enter textarea plugin property: "importQuery" with value: "rightImportQuery" - Then Click on the Get Schema button - - - - + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "invalidDatabase" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageBlankPassword" on the header + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid password + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "invalidPassword" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "invalidDatabase" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageInvalidPassword" on the header diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature index 06dc2fdb..a3b4a6d1 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature @@ -1,46 +1,51 @@ +# # Copyright © 2023 Cask Data, Inc. -## -## Licensed under the Apache License, Version 2.0 (the "License"); you may not -## use this file except in compliance with the License. You may obtain a copy of -## the License at -## -## http://www.apache.org/licenses/LICENSE-2.0 -## -## Unless required by applicable law or agreed to in writing, software -## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -## License for the specific language governing permissions and limitations under -# the License.. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# -@CLOUDSQL -Feature: CLOUDSQL Source - Run Time scenarios +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostGreSQL Source - Run Time scenarios - Scenario: Verify user should be able to preview the pipeline when plugin is configured for fetching table details - within database using CloudSQLPostGreSQL source to BQ sink + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @BQ_SINK_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink successfully with supported datatypes Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-postgresql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Replace input plugin property: "connectionName" with value: "cdf-athena:europe-west1:cloud-postgresql-automation" + Then Replace input plugin property: "connectionName" with value: "connectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Enter input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "test_automation_db" + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" Then Enter textarea plugin property: "importQuery" with value: "selectQuery" Then Click on the Get Schema button - Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" Then Validate "CloudSQL PostgreSQL" plugin properties Then Close the Plugin Properties page - Then Expand Plugin group in the LHS plugins list: "Sink" - Then Select plugin: "BigQuery" from the plugins list as: "Sink" Then Navigate to the properties page of plugin: "BigQuery" Then Replace input plugin property: "project" with value: "projectId" Then Enter input plugin property: "datasetProject" with value: "projectId" Then Enter 
input plugin property: "referenceName" with value: "BQReferenceName" Then Enter input plugin property: "dataset" with value: "dataset" Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "updateTableSchema" Then Validate "BigQuery" plugin properties Then Close the Plugin Properties page Then Save the pipeline @@ -56,5 +61,177 @@ Feature: CLOUDSQL Source - Run Time scenarios Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs - Then Validate OUT record count is equal to records transferred to target BigQuery table - Then Validate the values of records transferred to target Big Query table is equal to the values from source table \ No newline at end of file +# Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @BQ_SINK_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify data is getting transferred from PostgreSQL source to BigQuery sink successfully when connection arguments are set + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the 
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+# Then Validate the values of records transferred to target Big Query table is equal to the values from source table
+
+  @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST
+  Scenario: To verify pipeline failure message in logs when an invalid bounding query is provided
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "splitBy" with value: "splitBy"
+    Then Enter textarea plugin property: "importQuery" with value: "importQuery"
+    Then Click on the Get Schema button
+    Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
+    Then Enter textarea plugin property: "boundingQuery" with value: "invalidBoundingQueryValue"
+    Then Validate "CloudSQL PostgreSQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Click plugin property: "truncateTable"
+    Then Click plugin property: "updateTableSchema"
+    Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    And Save and Deploy Pipeline
+    And Run the Pipeline in Runtime
+    And Wait till pipeline is in running state
+    And Verify the pipeline status is "Failed"
+    Then Open Pipeline logs and verify Log entries having below listed Level and Message:
+      | Level | Message                              |
+      | ERROR | errorLogsMessageInvalidBoundingQuery |
+
+  @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST
+  Scenario: To verify the pipeline preview fails when an invalid bounding query is provided with the split-by field set
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "splitBy" with value: "splitBy"
+    Then Enter textarea plugin property: "importQuery" with value: "importQuery"
+    Then Click on the Get Schema button
+    Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
+    Then Enter textarea plugin property: "boundingQuery" with value: "invalidBoundingQuery"
+    Then Validate "CloudSQL PostgreSQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Click plugin property: "truncateTable"
+    Then Click plugin property: "updateTableSchema"
+    Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Verify the preview run status of pipeline in the logs is "failed"
+
+  @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST
+  Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully with supported datatypes
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value:
"databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for CloudSQLPostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" +# Then Validate the values of records transferred to target table is equal to the values from source table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature new file mode 100644 index 00000000..df6deeda --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature @@ -0,0 +1,334 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+# + +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostGreSQL source - Verify CloudSQL-PostGreSQL plugin data transfer with macro arguments + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully using macro arguments in connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable 
"username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs +# Then Validate the values of records transferred to target table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "splitBy" and set the value to: "cloudSQLPostgreSQLSplitByColumn" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLImportQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostgreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostgreSQLSchemaName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn" + Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery" + Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName" + 
Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn" + Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery" + Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName" + Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs +# Then Validate the values of records transferred to target table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify pipeline preview fails when invalid connection details provided using macro arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin 
Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "invalidDriverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value "invalidUserName" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value "invalidPassword" for key "cloudSQLPostgreSQLPassword" + Then Run the preview of pipeline with runtime arguments + Then Verify the preview of pipeline is "Failed" + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify pipeline preview fails when invalid basic details provided using macro arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLInvalidImportQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostgreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostgreSQLSchemaName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "invalidTableNameImportQuery" for key "cloudSQLPostgreSQLInvalidImportQuery" + Then Enter runtime argument value "invalidTable" for key "cloudSQLPostgreSQLTableName" + Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the preview of pipeline with runtime arguments + Then Verify the preview of pipeline is "Failed" + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST 
@CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink using macro arguments in connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Click on the Macro button of Property: "truncateTableMacroInput" and set the value to: "bqTruncateTable" + Then Click on the Macro button of Property: "updateTableSchemaMacroInput" and set the value to: "bqUpdateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqTargetTable" for key "bqTable" + Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "driverName" for key 
"cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqTargetTable" for key "bqTable" + Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs +# Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "splitBy" and set the value to: "cloudSQLPostgreSQLSplitByColumn" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLImportQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Click on the Macro button of Property: "truncateTableMacroInput" and set the value to: "bqTruncateTable" + Then Click on the Macro button of Property: "updateTableSchemaMacroInput" and set the value to: "bqUpdateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then 
Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn" + Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqTargetTable" for key "bqTable" + Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn" + Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqTargetTable" for key "bqTable" + Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs +# Then Validate the values of records transferred to target Big Query table is equal to the values from source table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java index ea99bcc8..5722a777 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java @@ -1,81 +1,81 @@ -/* - * Copyright © 2023 Cask Data, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not - * use this file except in compliance with the License. You may obtain a copy of - * the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations under - * the License. - */ - -package io.cdap.plugin.cloudsqlpostgresql; - -import com.simba.googlebigquery.jdbc.DataSource; -import io.cdap.e2e.utils.PluginPropertyUtils; - -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.ArrayList; -import java.util.List; - -/** - * Big Query client. - */ -public class BQValidation { - - /** - * Extracts entire data from source and target tables. 
- * @param sourceTable table at the source side - * @param targetTable table at the BigQuery side - * @return true if the values in source and target side are equal - */ - public static boolean validateBQAndDBRecordValues(String schema, String sourceTable, String targetTable) - throws SQLException, ClassNotFoundException, InterruptedException { - String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable; - - try (Connection connect = CloudSqlClient.getCloudSqlConnection()) { - connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); - Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, - ResultSet.HOLD_CURSORS_OVER_COMMIT); - - ResultSet rsSource = statement1.executeQuery(getSourceQuery); - ResultSet rsTarget = getBigQueryDataAsResultSet(targetTable); - return CloudSqlClient.compareResultSetData(rsSource, rsTarget); - } - } - - public static ResultSet getBigQueryDataAsResultSet(String targetTable) throws SQLException { - Connection connection = null; - DataSource dataSource = new com.simba.googlebigquery.jdbc.DataSource(); - String projectId = PluginPropertyUtils.pluginProp("projectId"); - String datasetId = PluginPropertyUtils.pluginProp("dataset"); - - String jdbcUrl = String.format(PluginPropertyUtils.pluginProp("jdbcUrl"), projectId); - dataSource.setURL(jdbcUrl); - connection = dataSource.getConnection(); - Statement statement = connection.createStatement(); - ResultSet bqResultSet = statement.executeQuery("SELECT * from " + datasetId + "." + targetTable + ";"); - - return bqResultSet; - } - public static boolean validateBqToDBTarget(String schema, String sourceTable, String targetTable) - throws SQLException, ClassNotFoundException, InterruptedException { - String getSourceQuery = "SELECT * FROM " + schema + "." + targetTable; - try (Connection connect = CloudSqlClient.getCloudSqlConnection()) { - connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); - Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, - ResultSet.HOLD_CURSORS_OVER_COMMIT); - ResultSet rsTarget = statement1.executeQuery(getSourceQuery); - ResultSet rsSource = getBigQueryDataAsResultSet(sourceTable); - return CloudSqlClient.compareResultSetData(rsSource, rsTarget); - } - } -} +///* +// * Copyright © 2023 Cask Data, Inc. +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); you may not +// * use this file except in compliance with the License. You may obtain a copy of +// * the License at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// * License for the specific language governing permissions and limitations under +// * the License. +// */ +// +//package io.cdap.plugin.cloudsqlpostgresql; +// +//import com.simba.googlebigquery.jdbc.DataSource; +//import io.cdap.e2e.utils.PluginPropertyUtils; +// +//import java.sql.Connection; +//import java.sql.ResultSet; +//import java.sql.SQLException; +//import java.sql.Statement; +//import java.util.ArrayList; +//import java.util.List; +// +///** +// * Big Query client. +// */ +//public class BQValidation { +// +// /** +// * Extracts entire data from source and target tables. 
+// * @param sourceTable table at the source side +// * @param targetTable table at the BigQuery side +// * @return true if the values in source and target side are equal +// */ +// public static boolean validateBQAndDBRecordValues(String schema, String sourceTable, String targetTable) +// throws SQLException, ClassNotFoundException, InterruptedException { +// String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable; +// +// try (Connection connect = CloudSqlClient.getCloudSqlConnection()) { +// connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); +// Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, +// ResultSet.HOLD_CURSORS_OVER_COMMIT); +// +// ResultSet rsSource = statement1.executeQuery(getSourceQuery); +// ResultSet rsTarget = getBigQueryDataAsResultSet(targetTable); +// return CloudSqlClient.compareResultSetData(rsSource, rsTarget); +// } +// } +// +// public static ResultSet getBigQueryDataAsResultSet(String targetTable) throws SQLException { +// Connection connection = null; +// DataSource dataSource = new com.simba.googlebigquery.jdbc.DataSource(); +// String projectId = PluginPropertyUtils.pluginProp("projectId"); +// String datasetId = PluginPropertyUtils.pluginProp("dataset"); +// +// String jdbcUrl = String.format(PluginPropertyUtils.pluginProp("jdbcUrl"), projectId); +// dataSource.setURL(jdbcUrl); +// connection = dataSource.getConnection(); +// Statement statement = connection.createStatement(); +// ResultSet bqResultSet = statement.executeQuery("SELECT * from " + datasetId + "." + targetTable + ";"); +// +// return bqResultSet; +// } +// public static boolean validateBqToDBTarget(String schema, String sourceTable, String targetTable) +// throws SQLException, ClassNotFoundException, InterruptedException { +// String getSourceQuery = "SELECT * FROM " + schema + "." 
+ targetTable; +// try (Connection connect = CloudSqlClient.getCloudSqlConnection()) { +// connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); +// Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, +// ResultSet.HOLD_CURSORS_OVER_COMMIT); +// ResultSet rsTarget = statement1.executeQuery(getSourceQuery); +// ResultSet rsSource = getBigQueryDataAsResultSet(sourceTable); +// return CloudSqlClient.compareResultSetData(rsSource, rsTarget); +// } +// } +//} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlClient.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlClient.java deleted file mode 100644 index accc2781..00000000 --- a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlClient.java +++ /dev/null @@ -1,121 +0,0 @@ -package io.cdap.plugin.cloudsqlpostgresql; - -import io.cdap.e2e.pages.actions.CdfPipelineRunAction; -import io.cdap.e2e.utils.BigQueryClient; -import io.cdap.e2e.utils.PluginPropertyUtils; -import io.cucumber.java.en.Then; -import org.junit.Assert; -import stepsdesign.BeforeActions; - -import java.io.IOException; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; - -public class CloudSqlClient { - - public static void main(String[] args) throws ClassNotFoundException, SQLException { - getCloudSqlConnection(); - } - - public static Connection getCloudSqlConnection() throws ClassNotFoundException, SQLException { - Class.forName("org.postgresql.Driver"); - String instanceConnectionName = "cdf-athena:europe-west1:cloud-postgresql-automation"; - String databaseName = "test_automation_db"; - String username = "v"; - String password = "v@123"; - String jdbcUrl = String.format( - "jdbc:postgresql://google/%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.postgres.SocketFactory&user=%s&password=%s", - databaseName, instanceConnectionName, username, password); - Connection conn = DriverManager.getConnection(jdbcUrl); - System.out.println("Connected to the database successfully"); - return conn; - } - -// -// public static int countRecord(String table, String schema) throws SQLException, ClassNotFoundException { -// String countQuery = "SELECT COUNT(*) as total FROM " + schema + "." + table; -// try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement(); -// ResultSet rs = statement.executeQuery(countQuery)) { -// int num = 0; -// while (rs.next()) { -// num = (rs.getInt(1)); -// } -// return num; -// } -// } - public static boolean validateRecordValues(String schema, String sourceTable, String targetTable) throws - ClassNotFoundException, SQLException { - String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable; - String getTargetQuery = "SELECT * FROM " + schema + "." 
+ targetTable; - try (Connection connect = getCloudSqlConnection()) { - connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); - Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, - ResultSet.HOLD_CURSORS_OVER_COMMIT); - Statement statement2 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, - ResultSet.HOLD_CURSORS_OVER_COMMIT); - ResultSet rsSource = statement1.executeQuery(getSourceQuery); - ResultSet rsTarget = statement2.executeQuery(getTargetQuery); - return compareResultSetData(rsSource, rsTarget); - } - } - - static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarget) throws SQLException{ - return false; - } - - public static void createTargetTable(String targetTable, String schema) { - } - - public static void createSourceDatatypesTable(String sourceTable, String schema) { - } - - public static void createTargetDatatypesTable(String targetTable, String schema) { - } - - public static void createSourceLongTable(String sourceTable, String schema) { - } - - public static void createTargetLongTable(String targetTable, String schema) { - } - - public static void createSourceLongRawTable(String sourceTable, String schema) { - } - - public static void createTargetLongRawTable(String targetTable, String schema) { - } - - public static void createSourceLongVarcharTable(String sourceTable, String schema) { - } - - public static void createTargetLongVarCharTable(String targetTable, String schema) { - } - - public static void deleteTables(String schema, String[] strings) { - } - @Then("Validate OUT record count is equal to records transferred to target BigQuery table") - public void validateOUTRecordCountIsEqualToRecordsTransferredToTargetBigQueryTable() - throws IOException, InterruptedException, IOException { - int targetBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqTargetTable")); - BeforeActions.scenario.write("No of Records Transferred to BigQuery:" + targetBQRecordsCount); - Assert.assertEquals("Out records should match with target BigQuery table records count", - CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), targetBQRecordsCount); - } - - @Then("Validate the values of records transferred to target Big Query table is equal to the values from source table") - public void validateTheValuesOfRecordsTransferredToTargetBigQueryTableIsEqualToTheValuesFromSourceTable() - throws IOException, InterruptedException, IOException, SQLException, ClassNotFoundException { - int targetBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqTargetTable")); - BeforeActions.scenario.write("No of Records Transferred to BigQuery:" + targetBQRecordsCount); - Assert.assertEquals("Out records should match with target BigQuery table records count", - CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), targetBQRecordsCount); - - boolean recordsMatched = BQValidation.validateBQAndDBRecordValues(PluginPropertyUtils.pluginProp("schema"), - PluginPropertyUtils.pluginProp("sourceTable"), - PluginPropertyUtils.pluginProp("bqTargetTable")); - Assert.assertTrue("Value of records transferred to the target table should be equal to the value " + - "of the records in the source table", recordsMatched); - } -} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java 
b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java new file mode 100644 index 00000000..2ea0fd25 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java @@ -0,0 +1,179 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package io.cdap.plugin.cloudsqlpostgresql; + +import io.cdap.e2e.utils.PluginPropertyUtils; +import org.junit.Assert; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.TimeZone; + +/** + * CloudSQLPostgreSQL client. + */ + +public class CloudSqlPostgreSqlClient { + + public static void main(String[] args) throws ClassNotFoundException, SQLException { + getCloudSqlConnection(); + } + + public static Connection getCloudSqlConnection() throws ClassNotFoundException, SQLException { + Class.forName("org.postgresql.Driver"); + String instanceConnectionName = "cdf-athena:europe-west1:cloud-postgresql-automation"; + String databaseName = "test_automation_db"; + String username = "v"; + String password = "v@123"; + String jdbcUrl = String.format( + "jdbc:postgresql://google/%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.postgres.SocketFactory&user=%s&password=%s", + databaseName, instanceConnectionName, username, password); + Connection conn = DriverManager.getConnection(jdbcUrl); + System.out.println("Connected to the database successfully"); + return conn; + } + + public static int countRecord(String table, String schema) throws SQLException, ClassNotFoundException { + String countQuery = "SELECT COUNT(*) as total FROM " + schema + "." + table; + try (Connection connect = getCloudSqlConnection(); + Statement statement = connect.createStatement(); + ResultSet rs = statement.executeQuery(countQuery)) { + int num = 0; + while (rs.next()) { + num = (rs.getInt(1)); + } + return num; + } + } + + /** + * Extracts entire data from source and target tables. + * @param sourceTable table at the source side + * @param targetTable table at the sink side + * @return true if the values in source and target side are equal + */ + public static boolean validateRecordValues(String sourceTable, String targetTable, String schema) + throws SQLException, ClassNotFoundException { + String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable; + String getTargetQuery = "SELECT * FROM " + schema + "." 
+ targetTable; + try (Connection connect = getCloudSqlConnection()) { + connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); + Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, + ResultSet.HOLD_CURSORS_OVER_COMMIT); + Statement statement2 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, + ResultSet.HOLD_CURSORS_OVER_COMMIT); + ResultSet rsSource = statement1.executeQuery(getSourceQuery); + ResultSet rsTarget = statement2.executeQuery(getTargetQuery); + return compareResultSetData(rsSource, rsTarget); + } + } + + /** + * Compares the result Set data in source table and sink table.. + * @param rsSource result set of the source table data + * @param rsTarget result set of the target table data + * @return true if rsSource matches rsTarget + */ + public static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarget) throws SQLException { + ResultSetMetaData mdSource = rsSource.getMetaData(); + ResultSetMetaData mdTarget = rsTarget.getMetaData(); + int columnCountSource = mdSource.getColumnCount(); + int columnCountTarget = mdTarget.getColumnCount(); + Assert.assertEquals("Number of columns in source and target are not equal", + columnCountSource, columnCountTarget); + while (rsSource.next() && rsTarget.next()) { + int currentColumnCount = 1; + while (currentColumnCount <= columnCountSource) { + String columnTypeName = mdSource.getColumnTypeName(currentColumnCount); + int columnType = mdSource.getColumnType(currentColumnCount); + String columnName = mdSource.getColumnName(currentColumnCount); + if (columnType == Types.TIMESTAMP) { + GregorianCalendar gc = new GregorianCalendar(TimeZone.getTimeZone("UTC")); + gc.setGregorianChange(new Date(Long.MIN_VALUE)); + Timestamp sourceTS = rsSource.getTimestamp(currentColumnCount, gc); + Timestamp targetTS = rsTarget.getTimestamp(currentColumnCount, gc); + Assert.assertEquals(String.format("Different values found for column : %s", columnName), sourceTS, targetTS); + } else { + String sourceString = rsSource.getString(currentColumnCount); + String targetString = rsTarget.getString(currentColumnCount); + Assert.assertEquals(String.format("Different values found for column : %s", columnName), + sourceString, targetString); + } + currentColumnCount++; + } + } + Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table", + rsSource.next()); + Assert.assertFalse("Number of rows in Target table is greater than the number of rows in Source table", + rsTarget.next()); + return true; + } + + //Changes by Surya + + public static void createSourceTable(String sourceTable, String schema) throws SQLException, ClassNotFoundException { + try (Connection connect = getCloudSqlConnection(); + Statement statement = connect.createStatement()) { + String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns"); + String createSourceTableQuery = "CREATE TABLE " + schema + "." + sourceTable + datatypesColumns; + statement.executeUpdate(createSourceTableQuery); + + // Insert dummy data. + String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValues"); + String datatypesColumnsList = PluginPropertyUtils.pluginProp("datatypesColumnsList"); + statement.executeUpdate("INSERT INTO " + schema + "." 
+ sourceTable + " " + datatypesColumnsList + " " + + datatypesValues); + } + } + + public static void createTargetTable(String targetTable, String schema) throws SQLException, ClassNotFoundException { + try (Connection connect = getCloudSqlConnection(); + Statement statement = connect.createStatement()) { + String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns"); + String createTargetTableQuery = "CREATE TABLE " + schema + "." + targetTable + " " + datatypesColumns; + statement.executeUpdate(createTargetTableQuery); + } + } + + public static void createTargetPostgresqlTable(String targetTable, String schema) throws SQLException, + ClassNotFoundException { + try (Connection connect = getCloudSqlConnection(); + Statement statement = connect.createStatement()) { + String datatypesColumns = PluginPropertyUtils.pluginProp("bigQueryDatatypesColumns"); + String createTargetTableQuery = "CREATE TABLE " + schema + "." + targetTable + " " + datatypesColumns; + statement.executeUpdate(createTargetTableQuery); + } + } + + public static void dropTables(String[] tables, String schema) throws SQLException, ClassNotFoundException { + try (Connection connect = getCloudSqlConnection(); + Statement statement = connect.createStatement()) { + for (String table : tables) { + String dropTableQuery = "Drop Table " + schema + "." + table; + statement.executeUpdate(dropTableQuery); + } + } + } +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunner.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunner.java new file mode 100644 index 00000000..8ccf5641 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunner.java @@ -0,0 +1,36 @@ +/* + * Copyright © 2022 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package io.cdap.plugin.cloudsqlpostgresql.runners.sinkrunner; + +import io.cucumber.junit.Cucumber; +import io.cucumber.junit.CucumberOptions; +import org.junit.runner.RunWith; + +/** + * Test Runner to execute CloudSQLPostgreSQL Sink plugin testcases. 
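+ * <p>A hedged example invocation, assuming the module's standard Cucumber/Surefire wiring (the
+ * {@code cucumber.filter.tags} property name is an assumption on the Cucumber version in use, not
+ * something this patch configures):</p>
+ * <pre>
+ *   mvn clean verify -Dcucumber.filter.tags="@Cloudsqlpostgresql_Sink"
+ * </pre>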
+ */ +@RunWith(Cucumber.class) +@CucumberOptions( + features = {"src/e2e-test/features"}, + glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"}, + tags = {"@Cloudsqlpostgresql_Sink"}, + monochrome = true, + plugin = {"pretty", "html:target/cucumber-html-report/cloudsqlpostgresql-sink", + "json:target/cucumber-reports/cucumber-cloudsqlpostgresql-sink.json", + "junit:target/cucumber-reports/cucumber-cloudsqlpostgresql-sink.xml"} +) +public class TestRunner { +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunnerRequired.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunnerRequired.java new file mode 100644 index 00000000..e7313298 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunnerRequired.java @@ -0,0 +1,36 @@ +/* + * Copyright © 2022 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package io.cdap.plugin.cloudsqlpostgresql.runners.sinkrunner; + +import io.cucumber.junit.Cucumber; +import io.cucumber.junit.CucumberOptions; +import org.junit.runner.RunWith; + +/** + * Test Runner to execute only required CloudSQLPostgreSQL Sink plugin testcases. + */ +@RunWith(Cucumber.class) +@CucumberOptions( + features = {"src/e2e-test/features"}, + glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"}, + tags = {"@Cloudsqlpostgresql_Sink_Required"}, + monochrome = true, + plugin = {"pretty", "html:target/cucumber-html-report/cloudsqlpostgresql-sink-required", + "json:target/cucumber-reports/cucumber-cloudsqlpostgresql-sink-required.json", + "junit:target/cucumber-reports/cucumber-cloudsqlpostgresql-sink-required.xml"} +) +public class TestRunnerRequired { +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runner/package-info.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/package-info.java similarity index 82% rename from cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runner/package-info.java rename to cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/package-info.java index cf665579..007d0ff5 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runner/package-info.java +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/package-info.java @@ -13,8 +13,7 @@ * License for the specific language governing permissions and limitations under * the License. */ - /** - * Package contains the runners for Mysql features. + * Package contains the runner for the CloudSQLPostgreSQL sink plugin.
*/ -package io.cdap.plugin.cloudsqlpostgresql.runner; +package io.cdap.plugin.cloudsqlpostgresql.runners.sinkrunner; diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunner.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunner.java new file mode 100644 index 00000000..71b54c6e --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunner.java @@ -0,0 +1,36 @@ +/* + * Copyright © 2022 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package io.cdap.plugin.cloudsqlpostgresql.runners.sourcerunner; + +import io.cucumber.junit.Cucumber; +import io.cucumber.junit.CucumberOptions; +import org.junit.runner.RunWith; + +/** + * Test Runner to execute CloudSQLPostgreSQL Source plugin testcases. + */ +@RunWith(Cucumber.class) +@CucumberOptions( + features = {"src/e2e-test/features"}, + glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"}, + tags = {"@Cloudsqlpostgresql_Source"}, + monochrome = true, + plugin = {"pretty", "html:target/cucumber-html-report/cloudsqlpostgresql-source", + "json:target/cucumber-reports/cucumber-cloudsqlpostgresql-source.json", + "junit:target/cucumber-reports/cucumber-cloudsqlpostgresql-source.xml"} +) +public class TestRunner { +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunnerRequired.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunnerRequired.java new file mode 100644 index 00000000..ba6c93f4 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunnerRequired.java @@ -0,0 +1,36 @@ +/* + * Copyright © 2022 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package io.cdap.plugin.cloudsqlpostgresql.runners.sourcerunner; + +import io.cucumber.junit.Cucumber; +import io.cucumber.junit.CucumberOptions; +import org.junit.runner.RunWith; + +/** + * Test Runner to execute only required CloudSQLPostgreSQL Source plugin testcases. 
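+ * <p>Only scenarios carrying the tag wired into the {@code tags} filter below are picked up. As a sketch
+ * (hypothetical feature snippet, not part of this patch), a scenario opts in like so:</p>
+ * <pre>
+ *   @Cloudsqlpostgresql_Source_Required
+ *   Scenario: To verify data transfer from CloudSQLPostgreSQL source to BigQuery sink
+ * </pre>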
+ */ +@RunWith(Cucumber.class) +@CucumberOptions( + features = {"src/e2e-test/features"}, + glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"}, + tags = {"@Cloudsqlpostgresql_Source_Required"}, + monochrome = true, + plugin = {"pretty", "html:target/cucumber-html-report/cloudsqlpostgresql-source-required", + "json:target/cucumber-reports/cucumber-cloudsqlpostgresql-source-required.json", + "junit:target/cucumber-reports/cucumber-cloudsqlpostgresql-source-required.xml"} +) +public class TestRunnerRequired { +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/package-info.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/package-info.java new file mode 100644 index 00000000..02144ec6 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright © 2022 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +/** + * Package contains the runner for the CloudSQLPostgreSQL source plugin. + */ +package io.cdap.plugin.cloudsqlpostgresql.runners.sourcerunner; diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSql.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSql.java deleted file mode 100644 index a3906d82..00000000 --- a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSql.java +++ /dev/null @@ -1,5 +0,0 @@ -package io.cdap.plugin.cloudsqlpostgresql.stepsdesign; - -public class CloudSql { - -} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java new file mode 100644 index 00000000..3a921f73 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java @@ -0,0 +1,71 @@ +package io.cdap.plugin.cloudsqlpostgresql.stepsdesign; + +import io.cdap.e2e.pages.actions.CdfPipelineRunAction; +import io.cdap.e2e.utils.BigQueryClient; +import io.cdap.e2e.utils.CdfHelper; +import io.cdap.e2e.utils.PluginPropertyUtils; +import io.cdap.plugin.cloudsqlpostgresql.CloudSqlPostgreSqlClient; +import io.cucumber.java.en.Then; +import org.junit.Assert; +import stepsdesign.BeforeActions; + +import java.io.IOException; +import java.sql.SQLException; + +/** + * CLOUDSQLPOSTGRESQL Plugin related step design. 
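+ * <p>Each {@code @Then}-annotated method below backs the Gherkin step whose text matches the annotation
+ * value verbatim; e.g. the step "Validate the values of records transferred to target table is equal to the
+ * values from source table" first asserts the CloudSQL record count against the pipeline's records-out
+ * metric, then delegates the row-by-row comparison to {@code CloudSqlPostgreSqlClient.validateRecordValues}.</p>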
+ */ +public class CloudSqlPostgreSql implements CdfHelper { + + @Then("Click on preview data for CloudSQLPostgreSQL sink") + public void clickOnPreviewDataForCloudSQLPostgreSQLSink() { + openSinkPluginPreviewData("CloudSQL Postgres"); + } + + @Then("Validate the values of records transferred to target table is equal to the values from source table") + public void validateTheValuesOfRecordsTransferredToTargetTableIsEqualToTheValuesFromSourceTable() throws + SQLException, ClassNotFoundException { + int countRecords = CloudSqlPostgreSqlClient.countRecord(PluginPropertyUtils.pluginProp("targetTable"), + PluginPropertyUtils.pluginProp("schema")); + Assert.assertEquals("Number of records transferred should be equal to records out ", + countRecords, recordOut()); + BeforeActions.scenario.write(" ******** Number of records transferred ********:" + countRecords); + boolean recordsMatched = CloudSqlPostgreSqlClient.validateRecordValues( + PluginPropertyUtils.pluginProp("sourceTable"), + PluginPropertyUtils.pluginProp("targetTable"), + PluginPropertyUtils.pluginProp("schema")); + Assert.assertTrue("Value of records transferred to the target table should be equal to the value " + + "of the records in the source table", recordsMatched); + } + +// @Then("Validate the values of records transferred to target Big Query table is equal to the values from source table") +// public void validateTheValuesOfRecordsTransferredToTargetBigQueryTableIsEqualToTheValuesFromSourceTable() +// throws IOException, InterruptedException, IOException, SQLException, ClassNotFoundException { +// int targetBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqTargetTable")); +// BeforeActions.scenario.write("No of Records Transferred to BigQuery:" + targetBQRecordsCount); +// Assert.assertEquals("Out records should match with target BigQuery table records count", +// CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), targetBQRecordsCount); +// +// boolean recordsMatched = BQValidation.validateBQAndDBRecordValues(PluginPropertyUtils.pluginProp("schema"), +// PluginPropertyUtils.pluginProp("sourceTable"), +// PluginPropertyUtils.pluginProp("bqTargetTable")); +// Assert.assertTrue("Value of records transferred to the target table should be equal to the value " + +// "of the records in the source table", recordsMatched); +// } +// +// @Then("Validate the values of records transferred to target PostGreSQL table is equal to the values from source " + +// "BigQuery table") +// public void validateTheValuesOfRecordsTransferredToTargetPostGreSQLTableIsEqualToTheValuesFromSourceBigQueryTable() +// throws IOException, InterruptedException, IOException, SQLException, ClassNotFoundException { +// int sourceBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqSourceTable")); +// BeforeActions.scenario.write("No of Records from source BigQuery table:" + sourceBQRecordsCount); +// Assert.assertEquals("Out records should match with target PostgreSQL table records count", +// CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), sourceBQRecordsCount); +// +// boolean recordsMatched = BQValidation.validateBqToDBTarget(PluginPropertyUtils.pluginProp("schema"), +// PluginPropertyUtils.pluginProp("bqSourceTable"), +// PluginPropertyUtils.pluginProp("targetTable")); +// Assert.assertTrue("Value of records transferred to the target table should be equal to the value " + +// "of the records in the source table", recordsMatched); +// } +} diff --git 
a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java index 4641f34b..919ccf91 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java @@ -16,12 +16,24 @@ package io.cdap.plugin.common.stepsdesign; - +import com.google.cloud.bigquery.BigQueryException; +import io.cdap.e2e.utils.BigQueryClient; import io.cdap.e2e.utils.PluginPropertyUtils; -import io.cdap.plugin.cloudsqlpostgresql.CloudSqlClient; +import io.cdap.plugin.cloudsqlpostgresql.CloudSqlPostgreSqlClient; import io.cucumber.java.After; import io.cucumber.java.Before; import org.apache.commons.lang3.RandomStringUtils; +import org.apache.commons.lang3.StringUtils; +import org.junit.Assert; +import stepsdesign.BeforeActions; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.sql.SQLException; +import java.util.NoSuchElementException; +import java.util.UUID; /** * Cloudsql-postgresql test hooks. @@ -37,52 +49,109 @@ public static void setTableName() { PluginPropertyUtils.addPluginProp("sourceTable", sourceTableName); PluginPropertyUtils.addPluginProp("targetTable", targetTableName); String schema = PluginPropertyUtils.pluginProp("schema"); - PluginPropertyUtils.addPluginProp("selectQuery", String.format("Select * from auto.newtable;")); + PluginPropertyUtils.addPluginProp("selectQuery", + String.format("select * from %s.%s", schema, sourceTableName)); + } + + @Before(order = 2, value = "@CLOUDSQLPOSTGRESQL_SOURCE_TEST") + public static void createTables() throws SQLException, ClassNotFoundException { + CloudSqlPostgreSqlClient.createSourceTable(PluginPropertyUtils.pluginProp("sourceTable"), + PluginPropertyUtils.pluginProp("schema")); + CloudSqlPostgreSqlClient.createTargetTable(PluginPropertyUtils.pluginProp("targetTable"), + PluginPropertyUtils.pluginProp("schema")); } -// @Before(order = 2, value = "@CLOUDSQL_SOURCE_TEST") -// public static void createTables() { -// CloudSqlClient.createSourceTable(PluginPropertyUtils.pluginProp("sourceTable"), -// PluginPropertyUtils.pluginProp("schema")); -// CloudSqlClient.createTargetTable(PluginPropertyUtils.pluginProp("targetTable"), -// PluginPropertyUtils.pluginProp("schema")); -// } - - @Before(order = 2, value = "CLOUDSQL_SOURCE_DATATYPES_TEST") - public static void createAllDatatypesTables() { - CloudSqlClient.createSourceDatatypesTable(PluginPropertyUtils.pluginProp("sourceTable"), - PluginPropertyUtils.pluginProp("schema")); - CloudSqlClient.createTargetDatatypesTable(PluginPropertyUtils.pluginProp("targetTable"), - PluginPropertyUtils.pluginProp("schema")); + + @After(order = 2, value = "@CLOUDSQLPOSTGRESQL_SINK_TEST") + public static void dropTables() throws SQLException, ClassNotFoundException { + CloudSqlPostgreSqlClient.dropTables(new String[]{PluginPropertyUtils.pluginProp("sourceTable"), + PluginPropertyUtils.pluginProp("targetTable")}, + PluginPropertyUtils.pluginProp("schema")); } - @Before(order = 2, value = "@CLOUDSQL_SOURCE_DATATYPES_TEST2") - public static void createDatatypesTablesLong() { - CloudSqlClient.createSourceLongTable(PluginPropertyUtils.pluginProp("sourceTable"), - PluginPropertyUtils.pluginProp("schema")); - 
CloudSqlClient.createTargetLongTable(PluginPropertyUtils.pluginProp("targetTable"), - PluginPropertyUtils.pluginProp("schema")); + @Before(order = 2, value = "@CLOUDSQLPOSTGRESQL_TEST_TABLE") + public static void createPostgresqlTestTable() throws SQLException, ClassNotFoundException { + CloudSqlPostgreSqlClient.createTargetPostgresqlTable(PluginPropertyUtils.pluginProp("targetTable"), + PluginPropertyUtils.pluginProp("schema")); } - @Before(order = 2, value = "@CLOUDSQL_SOURCE_LONGRAW_TEST") - public static void createDatatypesTablesLongRaw() { - CloudSqlClient.createSourceLongRawTable(PluginPropertyUtils.pluginProp("sourceTable"), - PluginPropertyUtils.pluginProp("schema")); - CloudSqlClient.createTargetLongRawTable(PluginPropertyUtils.pluginProp("targetTable"), - PluginPropertyUtils.pluginProp("schema")); + @Before(order = 1, value = "@BQ_SINK_TEST") + public static void setTempTargetBQTableName() { + String bqTargetTableName = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); + PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTableName); + BeforeActions.scenario.write("BQ Target table name - " + bqTargetTableName); } - @Before(order = 2, value = "@CLOUDSQL_SOURCE_DATATYPES_TEST4") - public static void createLongVarcharTables() { - CloudSqlClient.createSourceLongVarcharTable(PluginPropertyUtils.pluginProp("sourceTable"), - PluginPropertyUtils.pluginProp("schema")); - CloudSqlClient.createTargetLongVarCharTable(PluginPropertyUtils.pluginProp("targetTable"), - PluginPropertyUtils.pluginProp("schema")); + @After(order = 1, value = "@BQ_SINK_TEST") + public static void deleteTempTargetBQTable() throws IOException, InterruptedException { + String bqTargetTableName = PluginPropertyUtils.pluginProp("bqTargetTable"); + try { + BigQueryClient.dropBqQuery(bqTargetTableName); + BeforeActions.scenario.write("BQ Target table - " + bqTargetTableName + " deleted successfully"); + PluginPropertyUtils.removePluginProp("bqTargetTable"); + } catch (BigQueryException e) { + if (e.getMessage().contains("Not found: Table")) { + BeforeActions.scenario.write("BQ Target Table " + bqTargetTableName + " does not exist"); + } else { + Assert.fail(e.getMessage()); + } + } } - @After(order = 1, value = "@CLOUDSQL_SINK_TEST") - public static void dropTables() { - CloudSqlClient.deleteTables(PluginPropertyUtils.pluginProp("schema"), - new String[]{PluginPropertyUtils.pluginProp("sourceTable"), - PluginPropertyUtils.pluginProp("targetTable")}); + /** + * Create BigQuery table. 
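+ * <p>Reads the DDL/DML files named by the {@code CreateBQTableQueryFile} and {@code InsertBQDataQueryFile}
+ * plugin properties, substitutes the {@code DATASET} and {@code TABLE_NAME} placeholders, executes both
+ * statements through {@code BigQueryClient}, and registers the generated table name as {@code bqSourceTable}
+ * (see {@code createSourceBQTableWithQueries} below).</p>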
+ */ + @Before(order = 1, value = "@BQ_SOURCE_TEST") + public static void createTempSourceBQTable() throws IOException, InterruptedException { + createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("CreateBQTableQueryFile"), + PluginPropertyUtils.pluginProp("InsertBQDataQueryFile")); + } + + @After(order = 1, value = "@BQ_SOURCE_TEST") + public static void deleteTempSourceBQTable() throws IOException, InterruptedException { + String bqSourceTable = PluginPropertyUtils.pluginProp("bqSourceTable"); + BigQueryClient.dropBqQuery(bqSourceTable); + BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully"); + PluginPropertyUtils.removePluginProp("bqSourceTable"); + } + + private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile, String bqInsertDataQueryFile) + throws IOException, InterruptedException { + String bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().substring(0, 5).replaceAll("-", + "_"); + + String createTableQuery = StringUtils.EMPTY; + try { + createTableQuery = new String(Files.readAllBytes(Paths.get(TestSetUpHooks.class.getResource + ("/" + bqCreateTableQueryFile).toURI())) + , StandardCharsets.UTF_8); + createTableQuery = createTableQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset")) + .replace("TABLE_NAME", bqSourceTable); + } catch (Exception e) { + BeforeActions.scenario.write("Exception in reading " + bqCreateTableQueryFile + " - " + e.getMessage()); + Assert.fail("Exception in BigQuery testdata prerequisite setup " + + "- error in reading create table query file " + e.getMessage()); + } + + String insertDataQuery = StringUtils.EMPTY; + try { + insertDataQuery = new String(Files.readAllBytes(Paths.get(TestSetUpHooks.class.getResource + ("/" + bqInsertDataQueryFile).toURI())) + , StandardCharsets.UTF_8); + insertDataQuery = insertDataQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset")) + .replace("TABLE_NAME", bqSourceTable); + } catch (Exception e) { + BeforeActions.scenario.write("Exception in reading " + bqInsertDataQueryFile + " - " + e.getMessage()); + Assert.fail("Exception in BigQuery testdata prerequisite setup " + + "- error in reading insert data query file " + e.getMessage()); + } + BigQueryClient.getSoleQueryResult(createTableQuery); + try { + BigQueryClient.getSoleQueryResult(insertDataQuery); + } catch (NoSuchElementException e) { + // Insert query does not return any record. + // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully"); } } diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties index 41327888..3d795809 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties @@ -1,20 +1,23 @@ validationSuccessMessage=No errors found. -#connectionName=Connection Name must be in the format :: to connect to a public CloudSQL PostgreSQL instance. -name=SQL error while getting query schema: FATAL: password authentication failed for user "f" -password=SQL error while getting query schema: FATAL: password authentication failed for user "v" -referenceName=Invalid reference name '2942hd+'. 
Supported characters are: letters, numbers, and '_', '-', '.', or '$'. -databaseName=SQL error while getting query schema: FATAL: database "wdkdcw" does not exist -NumberOfSplit=Invalid value for Number of Splits '0'. Must be at least 1. Specify a Number of Splits no less than 1. -fetchSize=Invalid fetch size. Fetch size must be a positive integer. -fetechSizeBlank=Invalid fetch size. Fetch size must be a positive integer. -errorMessagesBoundingQuery=Bounding Query must be specified if Number of Splits is not set to 1. Specify the Bounding Query. -errorMesagesNumberOfSplit=Split-By Field Name must be specified if Number of Splits is not set to 1. Specify the Split-by Field Name. -errorMessagesSplitColumn=Split-By Field Name must be specified if Number of Splits is not set to 1. Specify the Split-by Field Name. -errorMessageNumberOfSplitWONumber=Unable to create config for batchsource CloudSQLPostgreSQL 'numSplits' is invalid: -errorMessageImportQuery=Import Query "Select * from auto.newtable;" must contain the string '$CONDITIONS'. if Number of Splits is not set to 1. Include '$CONDITIONS' in the Import Query - -invalidMessageConnectionName=Connection Name must be in the format :: to connect to a public CloudSQL PostgreSQL instance. -invalidMessageDatabaseName=Exception while trying to validate schema of database table 'auto."newtable"' for connection -invalidMessageTableNameField=Exception while trying to validate schema of database table 'auto."wrong"' -invalidReferenceName=Invalid reference name '93bhed92@13#$'. Supported characters are: letters, numbers, and '_', '-', '.', or '$'. -invalidUSERNAME=Exception while trying to validate schema of database table 'auto."newtable"' for connection \ No newline at end of file +errorMessageCloudPostgreSQLInvalidReferenceName=Invalid reference name 'invalidRef&^*&&*'. Supported characters are: \ + letters, numbers, and '_', '-', '.', or '$'. +errorMessageBoundingQuery=Bounding Query must be specified if Number of Splits is not set to 1. Specify the Bounding Query. +errorMessageBlankSplitBy=Split-By Field Name must be specified if Number of Splits is not set to 1. Specify the Split-by Field Name. +errorMessageInvalidNumberOfSplits=Invalid value for Number of Splits '0'. Must be at least 1. Specify a Number of Splits no less than 1. +errorMessageNumberOfSplitNotNumber=Unable to create config for batchsource CloudSQLPostgreSQL 'numSplits' is invalid: Value of \ + field class io.cdap.plugin.db.config.AbstractDBSpecificSourceConfig.numSplits is expected to be a number. +errorMessageInvalidFetchSize=Invalid fetch size. Fetch size must be a positive integer. +errorMessageInvalidSourceDatabase=SQL error while getting query schema: FATAL: database "invalidDatabase" does not exist +errorMessageInvalidImportQuery=Import Query select must contain the string '$CONDITIONS'. if Number of Splits is not set\ + \ to 1. Include '$CONDITIONS' in the Import Query +errorMessageBlankUsername=Username is required when password is given. +errorMessageBlankPassword=SQL error while getting query schema: The server requested password-based authentication, \ + but no password was provided. +errorMessageInvalidPassword=SQL error while getting query schema: FATAL: password authentication failed for user +errorMessageInvalidSourceHost=SQL error while getting query schema: The connection attempt failed. +errorMessageInvalidTableName=Table 'table' does not exist. 
Ensure table '"table"' is set correctly and that the +errorMessageInvalidSinkDatabase=Exception while trying to validate schema of database table '"TARGETTABLE_ +errorLogsMessageInvalidBoundingQuery=Spark program 'phase-1' failed with error: The column index is out of range: 1, \ + number of columns: 0.. Please check the system logs for more details. +errorMessageConnectionName=Connection Name must be in the format :: to connect to \ + a public CloudSQL PostgreSQL instance. diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties new file mode 100644 index 00000000..4f8cb950 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties @@ -0,0 +1,18 @@ +jdbcDriverName=select-jdbcPluginName +jdbcDriverNameMacroInput=jdbcPluginName +username=user +password=password +database=database +referenceName=referenceName +importQuery=importQuery +boundingQuery=boundingQuery +connectionArguments=connectionArguments +truncateTable=switch-truncateTable +truncateTableMacroInput=truncateTable +updateTableSchema=switch-allowSchemaRelaxation +updateTableSchemaMacroInput=allowSchemaRelaxation +outputSchemaMacroInput=Output Schema-macro-input +projectId=project +datasetProjectId=datasetProject +dataset=dataset +table=table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties index 9aa1a056..b9fda856 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties @@ -1,42 +1,103 @@ -driverName=mysql -host=MYSQL_HOST -username=v -password=v@123 +driverName=cloudsql-postgresql +username=CLOUDSQLPOSTGRESQL_USERNAME +password=CLOUDSQLPOSTGRESQL_PASSWORD databaseName=test_automation_db +schema=public + +datatypesColumns=( id varchar(100) primary key, col1 bpchar, col2 bpchar(10), col3 varchar, col4 varchar(3), \ + col5 bytea, col6 int2, col7 int4, col8 int8, col10 numeric(10, 4), col11 numeric(10), col12 float4, col13 float8, \ + col14 money, col15 text, col16 name, col17 float8, col18 numeric(38), col22 timestamp, col23 timestamptz, \ + col24 time, col25 interval, col26 interval, col27 date, col28 timetz, col29 point, col30 line, col31 lseg, \ + col32 box, col33 path, col34 polygon, col35 circle, col36 cidr, col37 inet, col38 macaddr, col39 macaddr8, \ + col40 bit(2), col41 varbit(5), col42 json, col43 jsonb, col44 _pg_lsn, col45 pg_snapshot, col46 tsquery, \ + col47 tsvector, col48 txid_snapshot, col49 uuid, col50 xml, col51 int4range, col52 int8range, col53 numrange, \ + col54 tsrange, col55 tstzrange, col56 daterange, col57 pg_lsn, col58 int4, col59 int2, col60 int8, col61 real, \ + col62 smallint, col63 serial, col64 smallserial, col65 double precision, col66 bigint, col67 bigserial, col68 boolean) + +datatypesColumnsList=( id, col1, col2, col3, col4, col5, col6 , col7 , col8 , col10, col11, col12, col13, col14, \ + col15, col16, col17, col18, col22, col23, col24, col25, col26, col27, col28, col29, col30, col31, col32, col33, \ + col34, col35, col36, col37, col38, col39, col40, col41, col42, col43, col44, col45, col46, col47, col48, col49, \ + col50, col51, col52, col53, col54, col55, col56, col57, col58, col59, col60, col61, col62, col63, col64, col65,\ + col66, col67, col68 ) + +datatypesValues=VALUES ('User5', 'M', 
'ABC...1234', 'B', 'ABC', decode('48656C6C6F20576F726C6421','hex'), 123, 123, \ + 123456, 123.4567, 123456789, 123.456, 123.456, 100.26, 'Hello World!', 'User 5', 123.456, 100, \ + '2023-01-01 07:30:00.000', '2023-01-01 15:30:00.000', '02:00:00', '6 mons 02:30:00'::interval, \ + '6 mons 02:30:00'::interval, '2001-01-01', '02:00:00', '(21.0,32.0)'::point, '{2.0,3.0,4.0}'::line, \ + '[(2.0,3.0),(4.0,5.0)]'::lseg, '(4.0,5.0),(2.0,3.0)'::box, '((2.0,3.0),(4.0,5.0),(6.0,7.0))'::path, \ + '((2.0,3.0),(4.0,5.0),(6.0,1.0))'::polygon, '<(4.0,5.0),2.0>'::circle, '192.168.0.0/24'::cidr, \ + '192.168.0.1/24'::inet, '08:00:2b:01:02:03'::macaddr, '08:00:2b:01:02:03:04:05'::macaddr8, '00', '11100', \ + '{"bar": "baz", "balance": 7.77, "active": false}'::json, '{"bar": "baz", "active": false, "balance": 7.77}'::jsonb, \ + '{16/B374D848}', NULL, '''fat'' & ''rat'''::tsquery, \ + '''a'' ''and'' ''ate'' ''cat'' ''fat'' ''mat'' ''on'' ''rat'' ''sat'''::tsvector, NULL, \ + 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'::uuid, 'xml ''bar''', '[3,7)'::int4range, '[3,7)'::int8range, \ + '(1.0,14.0)'::numrange, '["2010-01-01 14:30:00","2010-01-01 15:30:00")'::tsrange, \ + '["2010-01-01 20:00:00+05:30","2010-01-01 21:00:00+05:30")'::tstzrange, '[1992-03-21,1994-06-26)'::daterange, \ + '16/B374D848'::pg_lsn, 2, 2, 2, '1234.5679', '600', DEFAULT, DEFAULT, '61.823765812', '2500000000000', \ + DEFAULT, false); + +datatypesSchema=[{"key":"id","value":"string"},{"key":"col1","value":"string"},{"key":"col2","value":"string"},\ + {"key":"col3","value":"string"},{"key":"col4","value":"string"},{"key":"col5","value":"bytes"},\ + {"key":"col6","value":"int"},{"key":"col7","value":"int"},{"key":"col8","value":"long"},\ + {"key":"col10","value":"decimal"},{"key":"col11","value":"decimal"},{"key":"col12","value":"float"},\ + {"key":"col13","value":"double"},{"key":"col14","value":"string"},{"key":"col15","value":"string"},\ + {"key":"col16","value":"string"},{"key":"col17","value":"double"},{"key":"col18","value":"decimal"},\ + {"key":"col22","value":"timestamp"},{"key":"col23","value":"timestamp"},{"key":"col24","value":"time"},\ + {"key":"col25","value":"string"},{"key":"col26","value":"string"},{"key":"col27","value":"date"},\ + {"key":"col28","value":"string"},{"key":"col29","value":"string"},{"key":"col30","value":"string"},\ + {"key":"col31","value":"string"},{"key":"col32","value":"string"},{"key":"col33","value":"string"},\ + {"key":"col34","value":"string"},{"key":"col35","value":"string"},{"key":"col36","value":"string"},\ + {"key":"col37","value":"string"},{"key":"col38","value":"string"},{"key":"col39","value":"string"},\ + {"key":"col40","value":"string"},{"key":"col41","value":"string"},{"key":"col42","value":"string"},\ + {"key":"col43","value":"string"},{"key":"col44","value":"string"},{"key":"col45","value":"string"},\ + {"key":"col46","value":"string"},{"key":"col47","value":"string"},{"key":"col48","value":"string"},\ + {"key":"col49","value":"string"},{"key":"col50","value":"string"},{"key":"col51","value":"string"},\ + {"key":"col52","value":"string"},{"key":"col53","value":"string"},{"key":"col54","value":"string"},\ + {"key":"col55","value":"string"},{"key":"col56","value":"string"},{"key":"col57","value":"string"},\ + {"key":"col58","value":"int"},{"key":"col59","value":"int"},{"key":"col60","value":"long"}, \ + {"key":"col61","value":"float"},{"key":"col62","value":"int"},{"key":"col63","value":"int"},\ + {"key":"col64","value":"int"},{"key":"col65","value":"double"},{"key":"col66","value":"long"},\ + 
{"key":"col67","value":"long"},{"key":"col68","value":"boolean"}] + +#CLOUDSQLPOSTGRESQL Invalid Properties +invalidRef=invalidRef&^*&&* +invalidDatabaseName=invalidDB +invalidImportQuery=select +invalidTableNameImportQuery=select * from abc; +invalidDriverName=abcdriver +invalidUserName=testUser +invalidPassword=testPassword +invalidBoundingQuery=SELECT MIN(id),MAX(id) FROM table +invalidBoundingQueryValue=select; +invalidTable=table +invalidConnectionName=abd3 + +#CLOUDSQLPOSTGRESQL Valid Properties +connectionArgumentsList=[{"key":"queryTimeout","value":"-1"}] +connectionTimeout=150 +numberOfSplits=2 zeroValue=0 -invalidSplit=abc -invalidSplitNumber=0 -wrongImportQuery="Select * from auto_updatable; -rightImportQuery=Select * from auto.newtable; -wrongReferenceName=93bhed92@13#$ -wrongUserName=null -wrongPassword=null -fetchSize=1000 -numberOfSplit= -splitBy= -blankSplit= -port=MYSQL_PORT -sourceRef=source -targetRef=target -outputSchema=[{"key":"col1","value":"int"},{"key":"col2","value":"string"},{"key":"col3","value":"string"},\ - {"key":"col4","value":"string"},{"key":"col5","value":"string"}] -user=user -pass=password -conn=conn -connectionArgument=1,new +splitByColumn=ID +importQuery = where $CONDITIONS connectionName=cdf-athena:europe-west1:cloud-postgresql-automation -connectionArgumentsList=[{"key":"queryTimeout","value":"-1"}] -tableName=newtable -validTableName=newtable -invalidTableName=wrong -dbSchemaName=auto -transactionLevel=TRANSACTION_READ_COMMITTED -connectionTimeout=10 -invalidConnectionName=abd3 -invalidDatabase=2nha -invalidUSERNAME=incorrect -macrosValueTable=newtable + #bq properties projectId=cdf-athena dataset=test_automation -BigQuery=sharedStyled__PluginNameContainer-cnOUhe dbXgBp +bqOutputMultipleDatatypesSchema= [{"key":"col1","value":"bytes"},{"key":"col2","value":"string"},\ + {"key":"col3","value":"date"},{"key":"col4","value":"double"},{"key":"col5","value":"decimal"},\ + {"key":"col6","value":"timestamp"},{"key":"col7","value":"decimal"},{"key":"col8","value":"boolean"},\ + {"key":"col9","value":"long"},{"key":"col10","value":"time"}] + +#bq macro properties +bqTruncateTable=true +bqUpdateTableSchema=true + +#bq queries file path +CreateBQTableQueryFile=testdata/BigQuery/BigQueryCreateTableQuery.txt +InsertBQDataQueryFile=testdata/BigQuery/BigQueryInsertDataQuery.txt + +#BIGQUERY Datatypes +bigQueryDatatypesColumns=(col1 bytea, col2 varchar(100), col3 date, col4 double precision, col5 numeric(5, 2), \ + col6 timestamp, col7 numeric(5, 2), col8 boolean, col9 int8, col10 time) +bigQueryDatatypesColumnsList=(col1,col2,col3,col4,col5,col6,col7,col8,col9,col10) diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableQuery.txt b/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableQuery.txt new file mode 100644 index 00000000..1799b4a3 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableQuery.txt @@ -0,0 +1,2 @@ +create table `DATASET.TABLE_NAME` (col1 BYTES, col2 STRING, col3 DATE, col4 FLOAT64, col5 NUMERIC, col6 TIMESTAMP, +col7 BIGNUMERIC, col8 BOOL, col9 INT, col10 TIME) diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataQuery.txt b/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataQuery.txt new file mode 100644 index 00000000..0f61bbe0 --- /dev/null +++ 
b/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataQuery.txt @@ -0,0 +1,5 @@ +insert into `DATASET.TABLE_NAME` (col1, col2, col3, col4, col5, col6, col7, col8, col9, col10) values +(b'01011011','30','2021-01-28',61.823765812,500.215, +'2019-03-10 04:50:01 UTC',500.214, false,200,'21:26:00'), +(b'00011011','10','2021-01-21',51.823765812,500.215, +'2018-03-10 04:50:01 UTC',500.214, true,206,'20:26:00'); From f4fa2493d01c226b3197efd7a461eee41edd328f Mon Sep 17 00:00:00 2001 From: suryakumari Date: Wed, 24 May 2023 17:26:56 +0530 Subject: [PATCH 3/5] e2e_Cloudsqlpostgresql run-time changes --- .../feature/cloudsql-postgresql/sink/RunTimeMacro.feature | 4 ++-- .../plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java | 2 -- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature index df6df385..f2a1ddd4 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature @@ -17,7 +17,7 @@ @Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required Feature: CloudSQL-PostgreSQL sink - Verify data transfer to PostgreSQL sink with macro arguments - @BQ_SOURCE_TEST @POSTGRESQL_TEST_TABLE + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink using macro arguments in connection section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -76,7 +76,7 @@ Feature: CloudSQL-PostgreSQL sink - Verify data transfer to PostgreSQL sink with Then Close the pipeline logs # Then Validate the values of records transferred to target PostGreSQL table is equal to the values from source BigQuery table - @BQ_SOURCE_TEST @POSTGRESQL_TEST_TABLE + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink using macro arguments in basic section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java index 2ea0fd25..4124bfe0 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java @@ -131,8 +131,6 @@ public static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarge return true; } - //Changes by Surya - public static void createSourceTable(String sourceTable, String schema) throws SQLException, ClassNotFoundException { try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) { From 2ae73acc8d57c8687c841e669f4601a6812ad977 Mon Sep 17 00:00:00 2001 From: bharatgulati Date: Tue, 16 May 2023 12:40:48 +0530 Subject: [PATCH 4/5] e2e_CloudSQLPostGreSQL Test scenarios --- cloudsql-postgresql-plugin/pom.xml | 8 +- .../sink/DesignTime.feature | 70 ++++ .../sink/DesignTimeWithMacros.feature | 53 +++ 
.../sink/DesignTimeWithValidation.feature | 143 ++++++++ .../cloudsql-postgresql/sink/RunTime.feature | 146 ++++++++ .../sink/RunTimeMacro.feature | 134 +++++++ .../source/DesignTime.feature | 57 +++ .../source/DesignTimeWithMacro.feature | 54 +++ .../source/DesignTimeWithValidation.feature | 229 ++++++++++++ .../source/RunTime.feature | 237 +++++++++++++ .../source/RunTimeMacro.feature | 334 ++++++++++++++++++ .../cloudsqlpostgresql/BQValidation.java | 268 ++++++++++++++ .../CloudSqlPostgreSqlClient.java | 174 +++++++++ .../runners/sinkrunner/TestRunner.java | 39 ++ .../sinkrunner/TestRunnerRequired.java | 36 ++ .../runners/sinkrunner/package-info.java | 19 + .../runners/sourcerunner/TestRunner.java | 38 ++ .../sourcerunner/TestRunnerRequired.java | 36 ++ .../runners/sourcerunner/package-info.java | 19 + .../stepsdesign/CloudSqlPostgreSql.java | 73 ++++ .../common/stepsdesign/TestSetUpHooks.java | 165 +++++++++ .../common/stepsdesign/package-info.java | 20 ++ .../resources/errorMessage.properties | 23 ++ .../pluginDataCyAttributes.properties | 18 + .../resources/pluginParameters.properties | 103 ++++++ .../BigQuery/BigQueryCreateTableQuery.txt | 2 + .../BigQuery/BigQueryInsertDataQuery.txt | 5 + 27 files changed, 2502 insertions(+), 1 deletion(-) create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunner.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunnerRequired.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/package-info.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunner.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunnerRequired.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/package-info.java create mode 100644 
cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableQuery.txt create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataQuery.txt diff --git a/cloudsql-postgresql-plugin/pom.xml b/cloudsql-postgresql-plugin/pom.xml index e8736f8b..1e0c60b9 100644 --- a/cloudsql-postgresql-plugin/pom.xml +++ b/cloudsql-postgresql-plugin/pom.xml @@ -50,7 +50,7 @@ <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> - <version>23.0</version> + <version>31.0.1-jre</version> </dependency> @@ -102,6 +102,12 @@ <version>42.3.1</version> <scope>test</scope> </dependency> + <dependency> + <groupId>com.google.code.gson</groupId> + <artifactId>gson</artifactId> + <version>2.8.8</version> + <scope>test</scope> + </dependency> diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature new file mode 100644 index 00000000..2eac819b --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature @@ -0,0 +1,70 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License.
+# + +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL sink - Verify CloudSQL-PostgreSQL sink plugin design time scenarios + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with connection and basic details for connectivity + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with connection arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with advanced details with connection timeout + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for 
Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "targetRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout"
+    Then Validate "CloudSQL PostgreSQL" plugin properties
+    Then Close the Plugin Properties page
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature
new file mode 100644
index 00000000..6c68180f
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature
@@ -0,0 +1,53 @@
+#
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required
+Feature: CloudSQL-PostgreSQL sink - Verify CloudSQL-PostgreSQL sink plugin design time macro scenarios
+
+  Scenario: To verify CloudSQLPostgreSQL sink plugin validation with macro enabled fields for connection section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostGreSQLDriverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostGreSQLUser"
+    Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostGreSQLPassword"
+    Then Click on the Macro button of Property: "connectionArguments" and set the value to: "cloudSQLPostGreSQLConnectionArguments"
+    Then Enter input plugin property: "referenceName" with value: "targetRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Replace input plugin property: "dbSchemaName" with value: "schema"
+    Then Validate "CloudSQL PostgreSQL" plugin properties
+    Then Close the Plugin Properties page
+
+  Scenario: To verify CloudSQLPostgreSQL sink plugin validation with macro enabled fields for basic section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: 
"CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostGreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostGreSQLSchemaName" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature new file mode 100644 index 00000000..0eda6558 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature @@ -0,0 +1,143 @@ +# +# Copyright © 2022 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+#
+
+@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required
+Feature: CloudSQL-PostgreSQL Sink - Verify CloudSQL-PostgreSQL Sink Plugin Error scenarios
+
+  Scenario: Verify CloudSQLPostgreSQL sink plugin validation errors for mandatory fields
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Click on the Validate button
+    Then Verify mandatory property error for below listed properties:
+      | jdbcPluginName |
+      | referenceName  |
+      | database       |
+      | tableName      |
+
+  Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid reference test data
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "invalidRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudPostgreSQLInvalidReferenceName"
+
+  Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid connection name test data
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "invalidConnectionName"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "targetRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "errorMessageConnectionName"
+
+  @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST
+  Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid database
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS 
plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "invalidDatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSinkDatabase" on the header + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid table name + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: 
"password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "invalidTable" + Then Click on the Validate button + Then Verify that the Plugin Property: "tableName" is displaying an in-line error message: "errorMessageInvalidTableName" + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with blank username + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Click on the Validate button + Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername" diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature new file mode 100644 index 00000000..2216773c --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature @@ -0,0 +1,146 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations under +# the License. +# + +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL sink - Verify data transfer from BigQuery source to CloudSQL-PostgreSQL sink + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink successfully with supported datatypes + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for CloudSQLPostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink successfully when connection arguments are set + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL 
PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for CloudSQLPostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink with Advanced property Connection timeout + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema 
matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for CloudSQLPostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature new file mode 100644 index 00000000..cf6a89cb --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature @@ -0,0 +1,134 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+# + +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL sink - Verify data transfer to PostgreSQL sink with macro arguments + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1629 @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink using macro arguments in connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" 
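+      # Note (illustrative, not part of the recorded steps): the macro keys set at design time
+      # (cloudSQLPostgreSQLDriverName, cloudSQLPostgreSQLUsername, cloudSQLPostgreSQLPassword)
+      # are resolved from the runtime arguments entered in the surrounding steps. For the "public"
+      # instance type the plugin is assumed to assemble a Cloud SQL socket-factory JDBC URL,
+      # roughly of the form:
+      #   jdbc:postgresql:///<database>?cloudSqlInstance=<connectionName>
+      #     &socketFactory=com.google.cloud.sql.postgres.SocketFactory&user=<user>&password=<password>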
+ Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1629 @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostgreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostgreSQLSchemaName" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName" + Then Enter 
runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName" + Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature new file mode 100644 index 00000000..fdedeaa7 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature @@ -0,0 +1,57 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+# + +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostgreSQL source - Verify CloudSQLPostgreSQL source plugin design time scenarios + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify CloudSQLPostgreSQL source plugin validation with connection and basic details for connectivity + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify CloudSQLPostgreSQL source plugin validation with connection arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature new file mode 100644 index 00000000..25e7ac52 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature @@ -0,0 +1,54 @@ 
+# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostgreSQL source - Verify CloudSQL-PostgreSQL source plugin design time macros scenarios + + Scenario: To verify CloudSQLPostgreSQL source plugin validation with macro enabled fields for connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostGreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostGreSQLUser" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostGreSQLPassword" + Then Click on the Macro button of Property: "connectionArguments" and set the value to: "cloudSQLPostGreSQLConnectionArguments" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + Scenario: To verify CloudSQLPostgreSQL source plugin validation with macro enabled fields for basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "splitBy" and set the value to: "cloudSQLPostGreSQLSplitBy" + Then Click on the Macro button of Property: "fetchSize" and set the value to: "cloudSQLPostGreSQLFetchSize" + Then Click on the Macro button of Property: "boundingQuery" and set the value in textarea: 
"cloudSQLPostGreSQLBoundingQuery" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostGreSQLImportQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature new file mode 100644 index 00000000..907906b4 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature @@ -0,0 +1,229 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-postgreSQL source - Verify CloudSQL-postgreSQL source plugin design time validation scenarios + + Scenario:Verify CloudSQLPostgreSQL source plugin validation errors for mandatory fields + Given Open Datafusion Project to configure pipeline + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Validate button + Then Verify mandatory property error for below listed properties: + | jdbcPluginName | + | database | + | referenceName | + | importQuery | + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid reference test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "invalidRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudPostgreSQLInvalidReferenceName" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid connection name test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from 
the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "invalidConnectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "errorMessageConnectionName" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with blank bounding query + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "invalidDatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery" + Then Replace input plugin property: "splitBy" with value: "splitBy" + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Click on the Validate button + Then Verify that the Plugin Property: "boundingQuery" is displaying an in-line error message: "errorMessageBoundingQuery" + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageBoundingQuery" + Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessageInvalidImportQuery" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with number of splits without split by field name + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then 
Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageBlankSplitBy"
+    Then Verify that the Plugin Property: "splitBy" is displaying an in-line error message: "errorMessageBlankSplitBy"
+
+  Scenario: To verify CloudSQLPostgreSQL source plugin validation error message when number of Split value is not a number
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "numSplits" with value: "zeroSplits"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageNumberOfSplitNotNumber"
+
+  Scenario: To verify CloudSQLPostgreSQL source plugin validation error message when number of Split value is changed to zero
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "numSplits" with value: "zeroValue"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageInvalidNumberOfSplits"
+
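+  # Note (illustrative sketch, not part of the test steps): the split validations above assume
+  # the usual CDAP database source contract - numSplits sets the read parallelism, splitBy names
+  # the numeric column used to partition rows, and boundingQuery returns the inclusive bounds,
+  # e.g.
+  #   SELECT MIN(id), MAX(id) FROM <dbSchemaName>.<tableName>
+  # The value keys used here (numberOfSplits, zeroSplits, zeroValue) are expected to resolve from
+  # pluginParameters.properties added in this patch.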
+  Scenario: To verify CloudSQLPostgreSQL source plugin validation error message when fetch size is changed to zero
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "fetchSize" with value: "zeroValue"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "fetchSize" is displaying an in-line error message: "errorMessageInvalidFetchSize"
+
+  Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid database
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "invalidDatabase"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Validate button
+    Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSourceDatabase" on the header
+
+  Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid import query
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for 
Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery" + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Click on the Validate button + Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessageInvalidImportQuery" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with blank username + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery" + Then Click on the Validate button + Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with blank password + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "invalidDatabase" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageBlankPassword" on the header + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid password + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related 
fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "invalidPassword" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "invalidDatabase" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageInvalidPassword" on the header diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature new file mode 100644 index 00000000..770541b2 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature @@ -0,0 +1,237 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostgreSQL Source - Run Time scenarios + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @BQ_SINK_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @PLUGIN-1526 + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink successfully with supported datatypes + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + 
Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @BQ_SINK_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @PLUGIN-1526 + Scenario: To verify data is getting transferred from PostgreSQL source to BigQuery sink successfully when connection arguments are set + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the 
pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST + Scenario: To verify pipeline failure message in logs when an invalid bounding query is provided + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "splitBy" with value: "splitBy" + Then Enter textarea plugin property: "importQuery" with value: "importQuery" + Then Click on the Get Schema button + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Enter textarea plugin property: "boundingQuery" with value: "invalidBoundingQueryValue" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + And Save and Deploy Pipeline + And Run the Pipeline in Runtime + And Wait till pipeline is in running state + And Verify the pipeline status is "Failed" + Then Open Pipeline logs and verify Log entries having below listed Level and Message: + | Level | Message | + | ERROR | errorLogsMessageInvalidBoundingQuery | + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST + Scenario: To verify the pipeline preview fails 
with an invalid bounding query when the split-by field is set + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "splitBy" with value: "splitBy" + Then Enter textarea plugin property: "importQuery" with value: "importQuery" + Then Click on the Get Schema button + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Enter textarea plugin property: "boundingQuery" with value: "invalidBoundingQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Verify the preview run status of pipeline in the logs is "failed" + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully with supported datatypes + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization 
related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for CloudSQLPostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target table is equal to the values from source table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature new file mode 100644 index 00000000..9729ce1e --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature @@ -0,0 +1,334 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+# + +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostgreSQL Source - Verify CloudSQL-PostgreSQL plugin data transfer with macro arguments + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully using macro arguments in connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "driverName" for key 
"cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "splitBy" and set the value to: "cloudSQLPostgreSQLSplitByColumn" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLImportQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostgreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostgreSQLSchemaName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "splitByColumn" for key 
"cloudSQLPostgreSQLSplitByColumn" + Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery" + Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName" + Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn" + Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery" + Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName" + Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify pipeline preview fails when invalid connection details provided using macro arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" 
+ Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "invalidDriverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value "invalidUserName" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value "invalidPassword" for key "cloudSQLPostgreSQLPassword" + Then Run the preview of pipeline with runtime arguments + Then Verify the preview of pipeline is "Failed" + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify pipeline preview fails when invalid basic details provided using macro arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLInvalidImportQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostgreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostgreSQLSchemaName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value 
"invalidTableNameImportQuery" for key "cloudSQLPostgreSQLInvalidImportQuery" + Then Enter runtime argument value "invalidTable" for key "cloudSQLPostgreSQLTableName" + Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the preview of pipeline with runtime arguments + Then Verify the preview of pipeline is "Failed" + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST @PLUGIN-1526 + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink using macro arguments in connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Click on the Macro button of Property: "truncateTableMacroInput" and set the value to: "bqTruncateTable" + Then Click on the Macro button of Property: "updateTableSchemaMacroInput" and set the value to: "bqUpdateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqTargetTable" for key "bqTable" + Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable" + Then Enter runtime argument value 
"bqUpdateTableSchema" for key "bqUpdateTableSchema" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqTargetTable" for key "bqTable" + Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST @PLUGIN-1526 + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "splitBy" and set the value to: "cloudSQLPostgreSQLSplitByColumn" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLImportQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: 
"bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Click on the Macro button of Property: "truncateTableMacroInput" and set the value to: "bqTruncateTable" + Then Click on the Macro button of Property: "updateTableSchemaMacroInput" and set the value to: "bqUpdateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn" + Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqTargetTable" for key "bqTable" + Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn" + Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqTargetTable" for key "bqTable" + Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java new file mode 100644 index 00000000..86214b9b --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java @@ -0,0 +1,268 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ +package io.cdap.plugin.cloudsqlpostgresql; + +import com.google.cloud.bigquery.TableResult; +import com.google.gson.Gson; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import io.cdap.e2e.utils.BigQueryClient; +import io.cdap.e2e.utils.PluginPropertyUtils; +import org.apache.spark.sql.types.Decimal; +import org.junit.Assert; + +import java.io.IOException; +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Types; +import java.time.LocalTime; +import java.time.OffsetTime; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.Base64; +import java.util.Date; +import java.util.List; + +/** + * BQValidation. + */ +public class BQValidation { + static List<JsonObject> bigQueryResponse = new ArrayList<>(); + static List<Object> bigQueryRows = new ArrayList<>(); + + /** + * Extracts entire data from source and target tables. + * + * @param schema schema the source table belongs to + * @param sourceTable table at the source side + * @param targetTable table at the sink side + * @return true if the values in source and target side are equal + */ + public static boolean validateDBToBQRecordValues(String schema, String sourceTable, String targetTable) + throws SQLException, ClassNotFoundException, IOException, InterruptedException { + // Clear state left over from earlier scenarios so stale rows cannot skew this comparison. + bigQueryRows.clear(); + bigQueryResponse.clear(); + getBigQueryTableData(targetTable, bigQueryRows); + for (Object rows : bigQueryRows) { + JsonObject json = new Gson().fromJson(String.valueOf(rows), JsonObject.class); + bigQueryResponse.add(json); + } + String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable; + try (Connection connect = CloudSqlPostgreSqlClient.getCloudSqlConnection()) { + connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); + Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, + ResultSet.HOLD_CURSORS_OVER_COMMIT); + + ResultSet rsSource = statement1.executeQuery(getSourceQuery); + return compareResultSetAndJsonData(rsSource, bigQueryResponse); + } + } + + public static boolean validateBQToDBRecordValues(String schema, String sourceTable, String targetTable) + throws SQLException, ClassNotFoundException, IOException, InterruptedException { + // Clear state left over from earlier scenarios so stale rows cannot skew this comparison. + bigQueryRows.clear(); + bigQueryResponse.clear(); + getBigQueryTableData(sourceTable, bigQueryRows); + for (Object rows : bigQueryRows) { + JsonObject json = new Gson().fromJson(String.valueOf(rows), JsonObject.class); + bigQueryResponse.add(json); + } + String getTargetQuery = "SELECT * FROM " + schema + "." + targetTable; + try (Connection connect = CloudSqlPostgreSqlClient.getCloudSqlConnection()) { + connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); + Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, + ResultSet.HOLD_CURSORS_OVER_COMMIT); + + ResultSet rsTarget = statement1.executeQuery(getTargetQuery); + return compareResultSetAndJsonData(rsTarget, bigQueryResponse); + } + } + + /** + * Retrieves the data from a specified BigQuery table and populates it into the provided list of objects. + * + * @param table The name of the BigQuery table to fetch data from. + * @param bigQueryRows The list to store the fetched BigQuery data. + */ + private static void getBigQueryTableData(String table, List<Object> bigQueryRows) + throws IOException, InterruptedException { + + String projectId = PluginPropertyUtils.pluginProp("projectId"); + String dataset = PluginPropertyUtils.pluginProp("dataset"); + String selectQuery = "SELECT TO_JSON(t) FROM `" + projectId + "." + dataset + "." 
+ table + "` AS t"; + TableResult result = BigQueryClient.getQueryResult(selectQuery); + result.iterateAll().forEach(value -> bigQueryRows.add(value.get(0).getValue())); + } + + /** + * Compares the data in the result set obtained from the CloudSQL PostgreSQL database with the provided BigQuery JSON objects. + * + * @param rsSource The result set obtained from the CloudSQL PostgreSQL database. + * @param bigQueryData The list of BigQuery JSON objects to compare with the result set data. + * @return True if the result set data matches the BigQuery data, false otherwise. + * @throws SQLException If an SQL error occurs during the result set operations. + */ + public static boolean compareResultSetAndJsonData(ResultSet rsSource, List<JsonObject> bigQueryData) + throws SQLException { + ResultSetMetaData mdSource = rsSource.getMetaData(); + boolean result = false; + int columnCountSource = mdSource.getColumnCount(); + + if (bigQueryData == null) { + Assert.fail("bigQueryData is null"); + return result; + } + // Get the column count of the first JsonObject in bigQueryData + int jsonObjectIdx = 0; + int columnCountTarget = 0; + if (bigQueryData.size() > 0) { + columnCountTarget = bigQueryData.get(jsonObjectIdx).entrySet().size(); + } + // Compare the number of columns in the source and target + Assert.assertEquals("Number of columns in source and target are not equal", + columnCountSource, columnCountTarget); + + while (rsSource.next()) { + int currentColumnCount = 1; + while (currentColumnCount <= columnCountSource) { + String columnTypeName = mdSource.getColumnTypeName(currentColumnCount); + int columnType = mdSource.getColumnType(currentColumnCount); + String columnName = mdSource.getColumnName(currentColumnCount); + // Perform different comparisons based on column type + switch (columnType) { + case Types.BIT: + boolean bqBooleanValue = bigQueryData.get(jsonObjectIdx).get(columnName).getAsBoolean(); + result = getBooleanValidation(rsSource, String.valueOf(bqBooleanValue), columnName, columnTypeName); + Assert.assertTrue(String.format("Different values found for column : %s", columnName), result); + break; + + case Types.DECIMAL: + case Types.NUMERIC: + BigDecimal sourceDecimal = rsSource.getBigDecimal(currentColumnCount); + BigDecimal targetDecimal = bigQueryData.get(jsonObjectIdx).get(columnName).getAsBigDecimal(); + int desiredScale = 2; // Set the desired scale (number of decimal places) + BigDecimal adjustedSourceValue = sourceDecimal.setScale(desiredScale, RoundingMode.HALF_UP); + BigDecimal adjustedTargetValue = targetDecimal.setScale(desiredScale, RoundingMode.HALF_UP); + Decimal sourceDecimalValue = Decimal.fromDecimal(adjustedSourceValue); + Decimal targetDecimalValue = Decimal.fromDecimal(adjustedTargetValue); + Assert.assertEquals(String.format("Different values found for column : %s", columnName), + sourceDecimalValue, targetDecimalValue); + break; + + case Types.REAL: + float sourceReal = rsSource.getFloat(currentColumnCount); + float targetReal = bigQueryData.get(jsonObjectIdx).get(columnName).getAsFloat(); + Assert.assertTrue(String.format("Different values found for column : %s", columnName), + Float.compare(sourceReal, targetReal) == 0); + break; + + case Types.TIMESTAMP: + // Timestamp columns are skipped in this comparison. + break; + + case Types.TIME: + String bqTimeString = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString(); + result = getTimeValidation(rsSource, bqTimeString, columnName, columnTypeName); + Assert.assertTrue(String.format("Different values found for column : %s", columnName), result); + break; + + case Types.BINARY: + case Types.VARBINARY: + String 
sourceB64String = new String(Base64.getEncoder().encode(rsSource.getBytes(currentColumnCount))); + String targetB64String = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString(); + Assert.assertEquals(String.format("Different values found for column : %s", columnName), + sourceB64String, targetB64String); + break; + + case Types.BIGINT: + long sourceVal = rsSource.getLong(currentColumnCount); + long targetVal = bigQueryData.get(jsonObjectIdx).get(columnName).getAsLong(); + Assert.assertEquals(String.format("Different values found for column : %s", columnName), + sourceVal, targetVal); + break; + + case Types.SMALLINT: + case Types.TINYINT: + case Types.INTEGER: + int sourceInt = rsSource.getInt(currentColumnCount); + int targetInt = bigQueryData.get(jsonObjectIdx).get(columnName).getAsInt(); + Assert.assertEquals(String.format("Different values found for column : %s", columnName), + sourceInt, targetInt); + break; + + case Types.DATE: + Date dateSource = rsSource.getDate(currentColumnCount); + Date dateTarget = java.sql.Date.valueOf( + bigQueryData.get(jsonObjectIdx).get(columnName).getAsString()); + Assert.assertEquals(String.format("Different values found for column : %s", columnName), dateSource, dateTarget); + break; + + case Types.DOUBLE: + double sourceMoney = rsSource.getDouble(currentColumnCount); + String targetMoneyStr = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString(); + // Strip currency symbols and grouping characters (e.g. from money values) before parsing. + targetMoneyStr = targetMoneyStr.replaceAll("[^0-9.]", ""); + double targetMoney = Double.parseDouble(targetMoneyStr); + Assert.assertTrue(String.format("Different values found for column: %s", columnName), + Double.compare(sourceMoney, targetMoney) == 0); + break; + + case Types.VARCHAR: + case Types.CHAR: + case Types.SQLXML: + case Types.OTHER: + default: + String sourceValue = rsSource.getString(currentColumnCount); + JsonElement jsonElement = bigQueryData.get(jsonObjectIdx).get(columnName); + String targetValue = (jsonElement != null && !jsonElement.isJsonNull()) ? 
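+              // BigQuery returns a JSON null for SQL NULL values, so isJsonNull() is checked before calling getAsString().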
jsonElement.getAsString() : null; + Assert.assertEquals( + String.format("Different %s values found for column : %s", columnTypeName, columnName), + String.valueOf(sourceValue), String.valueOf(targetValue)); + } + currentColumnCount++; + } + jsonObjectIdx++; + } + Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table", + rsSource.next()); + return true; + } + + private static boolean getBooleanValidation(ResultSet rsSource, String bqDateString, String columnName, + String columnTypeName) throws SQLException { + switch (columnTypeName) { + case "bit": + byte source = rsSource.getByte(columnName); + boolean sourceAsBoolean = source != 0; + return String.valueOf(sourceAsBoolean).equals(String.valueOf(bqDateString)); + case "bool": + boolean sourceValue = rsSource.getBoolean(columnName); + return String.valueOf(sourceValue).equals(String.valueOf(bqDateString)); + default: + return false; + } + } + + private static boolean getTimeValidation(ResultSet rsSource, String bqDateString, String columnName, String + columnTypeName) throws SQLException { + switch (columnTypeName) { + case "time": + Time sourceTime = rsSource.getTime(columnName); + Time targetTime = Time.valueOf(bqDateString); + return sourceTime.equals(targetTime); + case "timetz": + Time sourceT = rsSource.getTime(columnName); + LocalTime sourceLocalTime = sourceT.toLocalTime(); + OffsetTime targetOffsetTime = OffsetTime.parse(bqDateString, DateTimeFormatter.ISO_OFFSET_TIME); + LocalTime targetLocalTime = targetOffsetTime.toLocalTime(); + return String.valueOf(sourceLocalTime).equals(String.valueOf(targetLocalTime)); + + default: + return false; + } + } +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java new file mode 100644 index 00000000..69412d43 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java @@ -0,0 +1,174 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package io.cdap.plugin.cloudsqlpostgresql; + +import io.cdap.e2e.utils.PluginPropertyUtils; +import org.junit.Assert; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.TimeZone; + +/** + * CloudSQLPostgreSQL client. 
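+ * <p>Connections are obtained through the Cloud SQL JDBC socket factory using the instance connection name (project:region:instance), so no host or IP needs to be allow-listed; see getCloudSqlConnection() below.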
+ */ + +public class CloudSqlPostgreSqlClient { + public static Connection getCloudSqlConnection() throws ClassNotFoundException, SQLException { + Class.forName("org.postgresql.Driver"); + String database = PluginPropertyUtils.pluginProp("databaseName"); + String instanceConnectionName = System.getenv("CONNECTION_NAME"); + String username = System.getenv("CLOUDSQL_POSTGRESQL_USERNAME"); + String password = System.getenv("CLOUDSQL_POSTGRESQL_PASSWORD"); + + String jdbcUrl = String.format( + "jdbc:postgresql://google/%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.postgres.SocketFactory&user=%s&password=%s", + database, instanceConnectionName, username, password); + Connection conn = DriverManager.getConnection(jdbcUrl); + System.out.println("Connected to the database successfully"); + return conn; + } + + public static int countRecord(String table, String schema) throws SQLException, ClassNotFoundException { + String countQuery = "SELECT COUNT(*) as total FROM " + schema + "." + table; + try (Connection connect = getCloudSqlConnection(); + Statement statement = connect.createStatement(); + ResultSet rs = statement.executeQuery(countQuery)) { + int num = 0; + while (rs.next()) { + num = (rs.getInt(1)); + } + return num; + } + } + + /** + * Extracts entire data from source and target tables. + * @param sourceTable table at the source side + * @param targetTable table at the sink side + * @param schema schema the tables belong to + * @return true if the values in source and target side are equal + */ + public static boolean validateRecordValues(String sourceTable, String targetTable, String schema) + throws SQLException, ClassNotFoundException { + String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable; + String getTargetQuery = "SELECT * FROM " + schema + "." + targetTable; + try (Connection connect = getCloudSqlConnection()) { + connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); + Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, + ResultSet.HOLD_CURSORS_OVER_COMMIT); + Statement statement2 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, + ResultSet.HOLD_CURSORS_OVER_COMMIT); + ResultSet rsSource = statement1.executeQuery(getSourceQuery); + ResultSet rsTarget = statement2.executeQuery(getTargetQuery); + return compareResultSetData(rsSource, rsTarget); + } + } + + /** + * Compares the result set data of the source table and the sink table. 
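+ * <p>Rows are compared positionally, column by column; TIMESTAMP columns are read with a UTC calendar on both sides so that session timezone differences do not cause false mismatches.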
+ * @param rsSource result set of the source table data + * @param rsTarget result set of the target table data + * @return true if rsSource matches rsTarget + */ + public static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarget) throws SQLException { + ResultSetMetaData mdSource = rsSource.getMetaData(); + ResultSetMetaData mdTarget = rsTarget.getMetaData(); + int columnCountSource = mdSource.getColumnCount(); + int columnCountTarget = mdTarget.getColumnCount(); + Assert.assertEquals("Number of columns in source and target are not equal", + columnCountSource, columnCountTarget); + while (rsSource.next() && rsTarget.next()) { + int currentColumnCount = 1; + while (currentColumnCount <= columnCountSource) { + String columnTypeName = mdSource.getColumnTypeName(currentColumnCount); + int columnType = mdSource.getColumnType(currentColumnCount); + String columnName = mdSource.getColumnName(currentColumnCount); + if (columnType == Types.TIMESTAMP) { + GregorianCalendar gc = new GregorianCalendar(TimeZone.getTimeZone("UTC")); + gc.setGregorianChange(new Date(Long.MIN_VALUE)); + Timestamp sourceTS = rsSource.getTimestamp(currentColumnCount, gc); + Timestamp targetTS = rsTarget.getTimestamp(currentColumnCount, gc); + Assert.assertEquals(String.format("Different values found for column : %s", columnName), sourceTS, targetTS); + } else { + String sourceString = rsSource.getString(currentColumnCount); + String targetString = rsTarget.getString(currentColumnCount); + Assert.assertEquals(String.format("Different values found for column : %s", columnName), + sourceString, targetString); + } + currentColumnCount++; + } + } + Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table", + rsSource.next()); + Assert.assertFalse("Number of rows in Target table is greater than the number of rows in Source table", + rsTarget.next()); + return true; + } + + public static void createSourceTable(String sourceTable, String schema) throws SQLException, ClassNotFoundException { + try (Connection connect = getCloudSqlConnection(); + Statement statement = connect.createStatement()) { + String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns"); + String createSourceTableQuery = "CREATE TABLE " + schema + "." + sourceTable + datatypesColumns; + statement.executeUpdate(createSourceTableQuery); + System.out.println(createSourceTableQuery); + + // Insert dummy data. + String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValues"); + String datatypesColumnsList = PluginPropertyUtils.pluginProp("datatypesColumnsList"); + statement.executeUpdate("INSERT INTO " + schema + "." + sourceTable + " " + datatypesColumnsList + " " + + datatypesValues); + } + } + + public static void createTargetTable(String targetTable, String schema) throws SQLException, ClassNotFoundException { + try (Connection connect = getCloudSqlConnection(); + Statement statement = connect.createStatement()) { + String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns"); + String createTargetTableQuery = "CREATE TABLE " + schema + "." 
+ targetTable + " " + datatypesColumns; + statement.executeUpdate(createTargetTableQuery); + } + } + + public static void createTargetPostgresqlTable(String targetTable, String schema) throws SQLException, + ClassNotFoundException { + try (Connection connect = getCloudSqlConnection(); + Statement statement = connect.createStatement()) { + String datatypesColumns = PluginPropertyUtils.pluginProp("bigQueryDatatypesColumns"); + String createTargetTableQuery = "CREATE TABLE " + schema + "." + targetTable + " " + datatypesColumns; + statement.executeUpdate(createTargetTableQuery); + } + } + + public static void dropTables(String[] tables, String schema) throws SQLException, ClassNotFoundException { + try (Connection connect = getCloudSqlConnection(); + Statement statement = connect.createStatement()) { + for (String table : tables) { + String dropTableQuery = "Drop Table " + schema + "." + table; + statement.executeUpdate(dropTableQuery); + } + } + } +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunner.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunner.java new file mode 100644 index 00000000..c51429a5 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunner.java @@ -0,0 +1,39 @@ +/* + * Copyright © 2022 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package io.cdap.plugin.cloudsqlpostgresql.runners.sinkrunner; + +import io.cucumber.junit.Cucumber; +import io.cucumber.junit.CucumberOptions; +import org.junit.runner.RunWith; + +/** + * Test Runner to execute CloudSQLPostgreSQL Sink plugin testcases. + */ +@RunWith(Cucumber.class) +@CucumberOptions( + features = {"src/e2e-test/features"}, + glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"}, + tags = {"@Cloudsqlpostgresql_Sink and not @PLUGIN-1629 and not @PLUGIN-1526"}, + /* TODO :Enable tests once issue fixed https://cdap.atlassian.net/browse/PLUGIN-1629, + https://cdap.atlassian.net/browse/PLUGIN-1526 + */ + monochrome = true, + plugin = {"pretty", "html:target/cucumber-html-report/cloudsqlpostgresql-sink", + "json:target/cucumber-reports/cucumber-cloudsqlpostgresql-sink.json", + "junit:target/cucumber-reports/cucumber-cloudsqlpostgresql-sink.xml"} +) +public class TestRunner { +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunnerRequired.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunnerRequired.java new file mode 100644 index 00000000..e7313298 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunnerRequired.java @@ -0,0 +1,36 @@ +/* + * Copyright © 2022 Cask Data, Inc. 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.runners.sinkrunner;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute only required CloudSQLPostgreSQL Sink plugin testcases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+  features = {"src/e2e-test/features"},
+  glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"},
+  tags = {"@Cloudsqlpostgresql_Sink_Required"},
+  monochrome = true,
+  plugin = {"pretty", "html:target/cucumber-html-report/cloudsqlpostgresql-sink-required",
+    "json:target/cucumber-reports/cucumber-cloudsqlpostgresql-sink-required.json",
+    "junit:target/cucumber-reports/cucumber-cloudsqlpostgresql-sink-required.xml"}
+)
+public class TestRunnerRequired {
+}
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/package-info.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/package-info.java
new file mode 100644
index 00000000..007d0ff5
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Package contains the runner for the CloudSQLPostgreSQL sink plugin.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.runners.sinkrunner;
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunner.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunner.java
new file mode 100644
index 00000000..c1700f1b
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunner.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package io.cdap.plugin.cloudsqlpostgresql.runners.sourcerunner; + +import io.cucumber.junit.Cucumber; +import io.cucumber.junit.CucumberOptions; +import org.junit.runner.RunWith; + +/** + * Test Runner to execute CloudSQLPostgreSQL Source plugin testcases. + */ +@RunWith(Cucumber.class) +@CucumberOptions( + features = {"src/e2e-test/features"}, + glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"}, + tags = {"@Cloudsqlpostgresql_Source and not @PLUGIN-1526"}, + /* TODO :Enable tests once issue fixed https://cdap.atlassian.net/browse/PLUGIN-1526 + */ + monochrome = true, + plugin = {"pretty", "html:target/cucumber-html-report/cloudsqlpostgresql-source", + "json:target/cucumber-reports/cucumber-cloudsqlpostgresql-source.json", + "junit:target/cucumber-reports/cucumber-cloudsqlpostgresql-source.xml"} +) +public class TestRunner { +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunnerRequired.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunnerRequired.java new file mode 100644 index 00000000..ba6c93f4 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunnerRequired.java @@ -0,0 +1,36 @@ +/* + * Copyright © 2022 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package io.cdap.plugin.cloudsqlpostgresql.runners.sourcerunner; + +import io.cucumber.junit.Cucumber; +import io.cucumber.junit.CucumberOptions; +import org.junit.runner.RunWith; + +/** + * Test Runner to execute only required CloudSQLPostgreSQL Source plugin testcases. + */ +@RunWith(Cucumber.class) +@CucumberOptions( + features = {"src/e2e-test/features"}, + glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"}, + tags = {"@Cloudsqlpostgresql_Source_Required"}, + monochrome = true, + plugin = {"pretty", "html:target/cucumber-html-report/cloudsqlpostgresql-source-required", + "json:target/cucumber-reports/cucumber-cloudsqlpostgresql-source-required.json", + "junit:target/cucumber-reports/cucumber-cloudsqlpostgresql-source-required.xml"} +) +public class TestRunnerRequired { +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/package-info.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/package-info.java new file mode 100644 index 00000000..02144ec6 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright © 2022 Cask Data, Inc. 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Package contains the runner for the CloudSQLPostgreSQL source plugin.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.runners.sourcerunner;
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java
new file mode 100644
index 00000000..041cdb47
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java
@@ -0,0 +1,73 @@
+package io.cdap.plugin.cloudsqlpostgresql.stepsdesign;
+
+import io.cdap.e2e.pages.actions.CdfPipelineRunAction;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.CdfHelper;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.plugin.cloudsqlpostgresql.BQValidation;
+import io.cdap.plugin.cloudsqlpostgresql.CloudSqlPostgreSqlClient;
+import io.cucumber.java.en.Then;
+import org.junit.Assert;
+import stepsdesign.BeforeActions;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.text.ParseException;
+
+/**
+ * CloudSQLPostgreSQL plugin related step design.
+ */
+public class CloudSqlPostgreSql implements CdfHelper {
+
+  @Then("Click on preview data for CloudSQLPostgreSQL sink")
+  public void clickOnPreviewDataForCloudSQLPostgreSQLSink() {
+    openSinkPluginPreviewData("CloudSQLPostgreSQL");
+  }
+
+  @Then("Validate the values of records transferred to target table is equal to the values from source table")
+  public void validateTheValuesOfRecordsTransferredToTargetTableIsEqualToTheValuesFromSourceTable() throws
+    SQLException, ClassNotFoundException {
+    int countRecords = CloudSqlPostgreSqlClient.countRecord(PluginPropertyUtils.pluginProp("targetTable"),
+                                                            PluginPropertyUtils.pluginProp("schema"));
+    Assert.assertEquals("Number of records transferred should be equal to records out ",
+                        countRecords, recordOut());
+    BeforeActions.scenario.write(" ******** Number of records transferred ********:" + countRecords);
+    boolean recordsMatched = CloudSqlPostgreSqlClient.validateRecordValues(
+      PluginPropertyUtils.pluginProp("sourceTable"),
+      PluginPropertyUtils.pluginProp("targetTable"),
+      PluginPropertyUtils.pluginProp("schema"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value " +
+                        "of the records in the source table", recordsMatched);
+  }
+
+  @Then("Validate the values of records transferred to target Big Query table is equal to the values from source table")
+  public void validateTheValuesOfRecordsTransferredToTargetBigQueryTableIsEqualToTheValuesFromSourceTable()
+    throws IOException, InterruptedException, SQLException, ClassNotFoundException, ParseException {
+    int targetBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqTargetTable"));
+    BeforeActions.scenario.write("No of Records Transferred to BigQuery:" +
targetBQRecordsCount);
+    Assert.assertEquals("Out records should match with target BigQuery table records count",
+                        CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), targetBQRecordsCount);
+
+    boolean recordsMatched = BQValidation.validateDBToBQRecordValues(PluginPropertyUtils.pluginProp("schema"),
+                                                                     PluginPropertyUtils.pluginProp("sourceTable"),
+                                                                     PluginPropertyUtils.pluginProp("bqTargetTable"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value " +
+                        "of the records in the source table", recordsMatched);
+  }
+
+  @Then("Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source " +
+    "BigQuery table")
+  public void validateTheValuesOfRecordsTransferredToTargetCloudSQLPostGreSQLTableIsEqualToTheValuesFromSourceBigQueryTable()
+    throws IOException, InterruptedException, SQLException, ClassNotFoundException, ParseException {
+    int sourceBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqSourceTable"));
+    BeforeActions.scenario.write("No of Records from source BigQuery table:" + sourceBQRecordsCount);
+    Assert.assertEquals("Out records should match with target PostgreSQL table records count",
+                        CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), sourceBQRecordsCount);
+
+    boolean recordsMatched = BQValidation.validateBQToDBRecordValues(PluginPropertyUtils.pluginProp("schema"),
+                                                                     PluginPropertyUtils.pluginProp("bqSourceTable"),
+                                                                     PluginPropertyUtils.pluginProp("targetTable"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value " +
+                        "of the records in the source table", recordsMatched);
+  }
+}
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java
new file mode 100644
index 00000000..359b241d
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java
@@ -0,0 +1,165 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.common.stepsdesign;
+
+import com.google.cloud.bigquery.BigQueryException;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.plugin.cloudsqlpostgresql.CloudSqlPostgreSqlClient;
+import io.cucumber.java.After;
+import io.cucumber.java.Before;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.Assert;
+import stepsdesign.BeforeActions;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.sql.SQLException;
+import java.util.NoSuchElementException;
+import java.util.UUID;
+
+/**
+ * CloudSQL PostgreSQL test hooks.
+ */ + +public class TestSetUpHooks { + + @Before(order = 1) + public static void setTableName() { + String randomString = RandomStringUtils.randomAlphabetic(10).toLowerCase(); + String sourceTableName = String.format("sourcetable_%s", randomString); + String targetTableName = String.format("targettable_%s", randomString); + PluginPropertyUtils.addPluginProp("sourceTable", sourceTableName); + PluginPropertyUtils.addPluginProp("targetTable", targetTableName); + String schema = PluginPropertyUtils.pluginProp("schema"); + PluginPropertyUtils.addPluginProp("selectQuery", + String.format("select * from %s.%s", schema, sourceTableName)); + } + + @Before(order = 2, value = "@CLOUDSQLPOSTGRESQL_SOURCE_TEST") + public static void createTables() throws SQLException, ClassNotFoundException { + CloudSqlPostgreSqlClient.createSourceTable(PluginPropertyUtils.pluginProp("sourceTable"), + PluginPropertyUtils.pluginProp("schema")); + CloudSqlPostgreSqlClient.createTargetTable(PluginPropertyUtils.pluginProp("targetTable"), + PluginPropertyUtils.pluginProp("schema")); + } + + @After(order = 2, value = "@CLOUDSQLPOSTGRESQL_SINK_TEST") + public static void dropTables() throws SQLException, ClassNotFoundException { + CloudSqlPostgreSqlClient.dropTables(new String[]{PluginPropertyUtils.pluginProp("sourceTable"), + PluginPropertyUtils.pluginProp("targetTable")}, + PluginPropertyUtils.pluginProp("schema")); + } + + @Before(order = 2, value = "@CLOUDSQLPOSTGRESQL_TEST_TABLE") + public static void createPostgresqlTestTable() throws SQLException, ClassNotFoundException { + CloudSqlPostgreSqlClient.createTargetPostgresqlTable(PluginPropertyUtils.pluginProp("targetTable"), + PluginPropertyUtils.pluginProp("schema")); + } + + @After(order = 1, value = "@CLOUDSQLPOSTGRESQL_TEST_TABLE") + public static void dropTestTables() throws SQLException, ClassNotFoundException { + CloudSqlPostgreSqlClient.dropTables(new String[] {PluginPropertyUtils.pluginProp("targetTable")}, + PluginPropertyUtils.pluginProp("schema")); + } + + @Before(order = 1, value = "@BQ_SINK_TEST") + public static void setTempTargetBQTableName() { + String bqTargetTableName = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); + PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTableName); + BeforeActions.scenario.write("BQ Target table name - " + bqTargetTableName); + } + + @After(order = 1, value = "@BQ_SINK_TEST") + public static void deleteTempTargetBQTable() throws IOException, InterruptedException { + String bqTargetTableName = PluginPropertyUtils.pluginProp("bqTargetTable"); + try { + BigQueryClient.dropBqQuery(bqTargetTableName); + BeforeActions.scenario.write("BQ Target table - " + bqTargetTableName + " deleted successfully"); + PluginPropertyUtils.removePluginProp("bqTargetTable"); + } catch (BigQueryException e) { + if (e.getMessage().contains("Not found: Table")) { + BeforeActions.scenario.write("BQ Target Table " + bqTargetTableName + " does not exist"); + } else { + Assert.fail(e.getMessage()); + } + } + } + + /** + * Create BigQuery table. 
+ */ + @Before(order = 1, value = "@BQ_SOURCE_TEST") + public static void createTempSourceBQTable() throws IOException, InterruptedException { + createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("CreateBQTableQueryFile"), + PluginPropertyUtils.pluginProp("InsertBQDataQueryFile")); + } + + @After(order = 1, value = "@BQ_SOURCE_TEST") + public static void deleteTempSourceBQTable() throws IOException, InterruptedException { + String bqSourceTable = PluginPropertyUtils.pluginProp("bqSourceTable"); + BigQueryClient.dropBqQuery(bqSourceTable); + BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully"); + PluginPropertyUtils.removePluginProp("bqSourceTable"); + } + + private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile, String bqInsertDataQueryFile) + throws IOException, InterruptedException { + String bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().substring(0, 5).replaceAll("-", + "_"); + + String createTableQuery = StringUtils.EMPTY; + try { + createTableQuery = new String(Files.readAllBytes(Paths.get(TestSetUpHooks.class.getResource + ("/" + bqCreateTableQueryFile).toURI())) + , StandardCharsets.UTF_8); + createTableQuery = createTableQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset")) + .replace("TABLE_NAME", bqSourceTable); + } catch (Exception e) { + BeforeActions.scenario.write("Exception in reading " + bqCreateTableQueryFile + " - " + e.getMessage()); + Assert.fail("Exception in BigQuery testdata prerequisite setup " + + "- error in reading create table query file " + e.getMessage()); + } + + String insertDataQuery = StringUtils.EMPTY; + try { + insertDataQuery = new String(Files.readAllBytes(Paths.get(TestSetUpHooks.class.getResource + ("/" + bqInsertDataQueryFile).toURI())) + , StandardCharsets.UTF_8); + insertDataQuery = insertDataQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset")) + .replace("TABLE_NAME", bqSourceTable); + } catch (Exception e) { + BeforeActions.scenario.write("Exception in reading " + bqInsertDataQueryFile + " - " + e.getMessage()); + Assert.fail("Exception in BigQuery testdata prerequisite setup " + + "- error in reading insert data query file " + e.getMessage()); + } + BigQueryClient.getSoleQueryResult(createTableQuery); + try { + BigQueryClient.getSoleQueryResult(insertDataQuery); + } catch (NoSuchElementException e) { + // Insert query does not return any record. + // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully"); + + } + +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java new file mode 100644 index 00000000..63f8efab --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java @@ -0,0 +1,20 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +/** + * Package contains the stepDesign for common features. + */ +package io.cdap.plugin.common.stepsdesign; diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties new file mode 100644 index 00000000..3d795809 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties @@ -0,0 +1,23 @@ +validationSuccessMessage=No errors found. +errorMessageCloudPostgreSQLInvalidReferenceName=Invalid reference name 'invalidRef&^*&&*'. Supported characters are: \ + letters, numbers, and '_', '-', '.', or '$'. +errorMessageBoundingQuery=Bounding Query must be specified if Number of Splits is not set to 1. Specify the Bounding Query. +errorMessageBlankSplitBy=Split-By Field Name must be specified if Number of Splits is not set to 1. Specify the Split-by Field Name. +errorMessageInvalidNumberOfSplits=Invalid value for Number of Splits '0'. Must be at least 1. Specify a Number of Splits no less than 1. +errorMessageNumberOfSplitNotNumber=Unable to create config for batchsource CloudSQLPostgreSQL 'numSplits' is invalid: Value of \ + field class io.cdap.plugin.db.config.AbstractDBSpecificSourceConfig.numSplits is expected to be a number. +errorMessageInvalidFetchSize=Invalid fetch size. Fetch size must be a positive integer. +errorMessageInvalidSourceDatabase=SQL error while getting query schema: FATAL: database "invalidDatabase" does not exist +errorMessageInvalidImportQuery=Import Query select must contain the string '$CONDITIONS'. if Number of Splits is not set\ + \ to 1. Include '$CONDITIONS' in the Import Query +errorMessageBlankUsername=Username is required when password is given. +errorMessageBlankPassword=SQL error while getting query schema: The server requested password-based authentication, \ + but no password was provided. +errorMessageInvalidPassword=SQL error while getting query schema: FATAL: password authentication failed for user +errorMessageInvalidSourceHost=SQL error while getting query schema: The connection attempt failed. +errorMessageInvalidTableName=Table 'table' does not exist. Ensure table '"table"' is set correctly and that the +errorMessageInvalidSinkDatabase=Exception while trying to validate schema of database table '"TARGETTABLE_ +errorLogsMessageInvalidBoundingQuery=Spark program 'phase-1' failed with error: The column index is out of range: 1, \ + number of columns: 0.. Please check the system logs for more details. +errorMessageConnectionName=Connection Name must be in the format :: to connect to \ + a public CloudSQL PostgreSQL instance. 
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties new file mode 100644 index 00000000..4f8cb950 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties @@ -0,0 +1,18 @@ +jdbcDriverName=select-jdbcPluginName +jdbcDriverNameMacroInput=jdbcPluginName +username=user +password=password +database=database +referenceName=referenceName +importQuery=importQuery +boundingQuery=boundingQuery +connectionArguments=connectionArguments +truncateTable=switch-truncateTable +truncateTableMacroInput=truncateTable +updateTableSchema=switch-allowSchemaRelaxation +updateTableSchemaMacroInput=allowSchemaRelaxation +outputSchemaMacroInput=Output Schema-macro-input +projectId=project +datasetProjectId=datasetProject +dataset=dataset +table=table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties new file mode 100644 index 00000000..fb415f9a --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties @@ -0,0 +1,103 @@ +driverName=cloudsql-postgresql +username=CLOUDSQL_POSTGRESQL_USERNAME +password=CLOUDSQL_POSTGRESQL_PASSWORD +databaseName=test_automation_db +schema=public + +datatypesColumns=( id varchar(100) primary key, col1 bpchar, col2 bpchar(10), col3 varchar, col4 varchar(3), \ + col5 bytea, col6 int2, col7 int4, col8 int8, col10 numeric(10, 4), col11 numeric(10), col12 float4, col13 float8, \ + col14 money, col15 text, col16 name, col17 float8, col18 numeric(38), col22 timestamp, col23 timestamptz, \ + col24 time, col25 interval, col26 interval, col27 date, col28 timetz, col29 point, col30 line, col31 lseg, \ + col32 box, col33 path, col34 polygon, col35 circle, col36 cidr, col37 inet, col38 macaddr, col39 macaddr8, \ + col40 bit(2), col41 varbit(5), col42 json, col43 jsonb, col44 _pg_lsn, col45 pg_snapshot, col46 tsquery, \ + col47 tsvector, col48 txid_snapshot, col49 uuid, col50 xml, col51 int4range, col52 int8range, col53 numrange, \ + col54 tsrange, col55 tstzrange, col56 daterange, col57 pg_lsn, col58 int4, col59 int2, col60 int8, col61 real, \ + col62 smallint, col63 serial, col64 smallserial, col65 double precision, col66 bigint, col67 bigserial, col68 boolean) + +datatypesColumnsList=( id, col1, col2, col3, col4, col5, col6 , col7 , col8 , col10, col11, col12, col13, col14, \ + col15, col16, col17, col18, col22, col23, col24, col25, col26, col27, col28, col29, col30, col31, col32, col33, \ + col34, col35, col36, col37, col38, col39, col40, col41, col42, col43, col44, col45, col46, col47, col48, col49, \ + col50, col51, col52, col53, col54, col55, col56, col57, col58, col59, col60, col61, col62, col63, col64, col65,\ + col66, col67, col68 ) + +datatypesValues=VALUES ('User5', 'M', 'ABC...1234', 'B', 'ABC', decode('48656C6C6F20576F726C6421','hex'), 123, 123, \ + 123456, 123.4567, 123456789, 123.456, 123.456, 100.26, 'Hello World!', 'User 5', 123.456, 100, \ + '2023-01-01 07:30:00.000', '2023-01-01 15:30:00.000', '02:00:00', '6 mons 02:30:00'::interval, \ + '6 mons 02:30:00'::interval, '2001-01-01', '02:00:00', '(21.0,32.0)'::point, '{2.0,3.0,4.0}'::line, \ + '[(2.0,3.0),(4.0,5.0)]'::lseg, '(4.0,5.0),(2.0,3.0)'::box, '((2.0,3.0),(4.0,5.0),(6.0,7.0))'::path, \ + '((2.0,3.0),(4.0,5.0),(6.0,1.0))'::polygon, '<(4.0,5.0),2.0>'::circle, '192.168.0.0/24'::cidr, \ + 
'192.168.0.1/24'::inet, '08:00:2b:01:02:03'::macaddr, '08:00:2b:01:02:03:04:05'::macaddr8, '00', '11100', \ + '{"bar": "baz", "balance": 7.77, "active": false}'::json, '{"bar": "baz", "active": false, "balance": 7.77}'::jsonb, \ + '{16/B374D848}', NULL, '''fat'' & ''rat'''::tsquery, \ + '''a'' ''and'' ''ate'' ''cat'' ''fat'' ''mat'' ''on'' ''rat'' ''sat'''::tsvector, NULL, \ + 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'::uuid, 'xml ''bar''', '[3,7)'::int4range, '[3,7)'::int8range, \ + '(1.0,14.0)'::numrange, '["2010-01-01 14:30:00","2010-01-01 15:30:00")'::tsrange, \ + '["2010-01-01 20:00:00+05:30","2010-01-01 21:00:00+05:30")'::tstzrange, '[1992-03-21,1994-06-26)'::daterange, \ + '16/B374D848'::pg_lsn, 2, 2, 2, '1234.5679', '600', DEFAULT, DEFAULT, '61.823765812', '2500000000000', \ + DEFAULT, false); + +datatypesSchema=[{"key":"id","value":"string"},{"key":"col1","value":"string"},{"key":"col2","value":"string"},\ + {"key":"col3","value":"string"},{"key":"col4","value":"string"},{"key":"col5","value":"bytes"},\ + {"key":"col6","value":"int"},{"key":"col7","value":"int"},{"key":"col8","value":"long"},\ + {"key":"col10","value":"decimal"},{"key":"col11","value":"decimal"},{"key":"col12","value":"float"},\ + {"key":"col13","value":"double"},{"key":"col14","value":"string"},{"key":"col15","value":"string"},\ + {"key":"col16","value":"string"},{"key":"col17","value":"double"},{"key":"col18","value":"decimal"},\ + {"key":"col22","value":"timestamp"},{"key":"col23","value":"timestamp"},{"key":"col24","value":"time"},\ + {"key":"col25","value":"string"},{"key":"col26","value":"string"},{"key":"col27","value":"date"},\ + {"key":"col28","value":"string"},{"key":"col29","value":"string"},{"key":"col30","value":"string"},\ + {"key":"col31","value":"string"},{"key":"col32","value":"string"},{"key":"col33","value":"string"},\ + {"key":"col34","value":"string"},{"key":"col35","value":"string"},{"key":"col36","value":"string"},\ + {"key":"col37","value":"string"},{"key":"col38","value":"string"},{"key":"col39","value":"string"},\ + {"key":"col40","value":"string"},{"key":"col41","value":"string"},{"key":"col42","value":"string"},\ + {"key":"col43","value":"string"},{"key":"col44","value":"string"},{"key":"col45","value":"string"},\ + {"key":"col46","value":"string"},{"key":"col47","value":"string"},{"key":"col48","value":"string"},\ + {"key":"col49","value":"string"},{"key":"col50","value":"string"},{"key":"col51","value":"string"},\ + {"key":"col52","value":"string"},{"key":"col53","value":"string"},{"key":"col54","value":"string"},\ + {"key":"col55","value":"string"},{"key":"col56","value":"string"},{"key":"col57","value":"string"},\ + {"key":"col58","value":"int"},{"key":"col59","value":"int"},{"key":"col60","value":"long"}, \ + {"key":"col61","value":"float"},{"key":"col62","value":"int"},{"key":"col63","value":"int"},\ + {"key":"col64","value":"int"},{"key":"col65","value":"double"},{"key":"col66","value":"long"},\ + {"key":"col67","value":"long"},{"key":"col68","value":"boolean"}] + +#CLOUDSQLPOSTGRESQL Invalid Properties +invalidRef=invalidRef&^*&&* +invalidDatabaseName=invalidDB +invalidImportQuery=select +invalidTableNameImportQuery=select * from abc; +invalidDriverName=abcdriver +invalidUserName=testUser +invalidPassword=testPassword +invalidBoundingQuery=SELECT MIN(id),MAX(id) FROM table +invalidBoundingQueryValue=select; +invalidTable=table +invalidConnectionName=abd3 + +#CLOUDSQLPOSTGRESQL Valid Properties +connectionArgumentsList=[{"key":"queryTimeout","value":"-1"}] +connectionTimeout=150 
+numberOfSplits=2 +zeroValue=0 +splitByColumn=ID +importQuery = where $CONDITIONS +connectionName=CONNECTION_NAME + +#bq properties +projectId=cdf-athena +dataset=test_automation +bqOutputMultipleDatatypesSchema= [{"key":"col1","value":"bytes"},{"key":"col2","value":"string"},\ + {"key":"col3","value":"date"},{"key":"col4","value":"double"},{"key":"col5","value":"decimal"},\ + {"key":"col6","value":"timestamp"},{"key":"col7","value":"decimal"},{"key":"col8","value":"boolean"},\ + {"key":"col9","value":"long"},{"key":"col10","value":"time"}] + +#bq macro properties +bqTruncateTable=true +bqUpdateTableSchema=true + +#bq queries file path +CreateBQTableQueryFile=testdata/BigQuery/BigQueryCreateTableQuery.txt +InsertBQDataQueryFile=testdata/BigQuery/BigQueryInsertDataQuery.txt + +#BIGQUERY Datatypes +bigQueryDatatypesColumns=(col1 bytea, col2 varchar(100), col3 date, col4 double precision, col5 numeric(5, 2), \ + col6 timestamp, col7 numeric(5, 2), col8 boolean, col9 int8, col10 time) +bigQueryDatatypesColumnsList=(col1,col2,col3,col4,col5,col6,col7,col8,col9,col10) diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableQuery.txt b/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableQuery.txt new file mode 100644 index 00000000..1799b4a3 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableQuery.txt @@ -0,0 +1,2 @@ +create table `DATASET.TABLE_NAME` (col1 BYTES, col2 STRING, col3 DATE, col4 FLOAT64, col5 NUMERIC, col6 TIMESTAMP, +col7 BIGNUMERIC, col8 BOOL, col9 INT, col10 TIME) diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataQuery.txt b/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataQuery.txt new file mode 100644 index 00000000..a2829c8d --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataQuery.txt @@ -0,0 +1,5 @@ +insert into `DATASET.TABLE_NAME` (col1, col2, col3, col4, col5, col6, col7, col8, col9, col10) values +(b'01011011','30','2021-01-28',61.823765812,500.22, +'2019-03-10 04:50:01 UTC',500.21, false,200,'21:26:00'), +(b'00011011','10','2021-01-21',51.823765812,500.22, +'2018-03-10 04:50:01 UTC',500.21, true,206,'20:26:00'); From 194d85d04ef72657909b30e9f4de024228182b26 Mon Sep 17 00:00:00 2001 From: AnkitCLI Date: Mon, 5 Jun 2023 15:20:10 +0530 Subject: [PATCH 5/5] initial commits --- cloudsql-postgresql-plugin/pom.xml | 7 +- .../sink/DesignTime.feature | 2 +- .../sink/DesignTimeWithMacros.feature | 2 +- .../sink/DesignTimeWithValidation.feature | 4 +- .../cloudsql-postgresql/sink/RunTime.feature | 8 +- .../sink/RunTimeMacro.feature | 2 +- .../source/DesignTime.feature | 2 +- .../source/DesignTimeWithMacro.feature | 2 +- .../source/DesignTimeWithValidation.feature | 2 +- .../source/RunTime.feature | 4 +- .../source/RunTimeMacro.feature | 2 +- .../cdap/plugin/CloudSqlPostgreSqlClient.java | 169 +++++++++++++++++ .../cloudsqlpostgresql/BQValidation.java | 35 ++-- .../CloudSqlPostgreSqlClient.java | 174 ------------------ .../cloudsqlpostgresql/package-info.java | 20 ++ .../runners/sinkrunner/TestRunner.java | 4 +- .../sinkrunner/TestRunnerRequired.java | 9 +- .../runners/sinkrunner/package-info.java | 3 +- .../runners/sourcerunner/TestRunner.java | 4 +- .../sourcerunner/TestRunnerRequired.java | 6 +- .../runners/sourcerunner/package-info.java | 3 +- .../stepsdesign/CloudSqlPostgreSql.java | 24 ++- 
.../stepsdesign/package-info.java | 20 ++ .../common/stepsdesign/TestSetUpHooks.java | 2 +- .../java/io/cdap/plugin/package-info.java | 20 ++ .../resources/pluginParameters.properties | 10 +- 26 files changed, 306 insertions(+), 234 deletions(-) create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/CloudSqlPostgreSqlClient.java delete mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/package-info.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/package-info.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/package-info.java diff --git a/cloudsql-postgresql-plugin/pom.xml b/cloudsql-postgresql-plugin/pom.xml index 1e0c60b9..2f974e85 100644 --- a/cloudsql-postgresql-plugin/pom.xml +++ b/cloudsql-postgresql-plugin/pom.xml @@ -102,12 +102,7 @@ 42.3.1 test - - com.google.code.gson - gson - 2.8.8 - test - + diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature index 2eac819b..0f13bfe2 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature @@ -14,7 +14,7 @@ # the License. # -@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +@Regression @Sink_Required Feature: CloudSQL-PostgreSQL sink - Verify CloudSQL-PostgreSQL sink plugin design time scenarios Scenario: To verify CloudSQLPostgreSQL sink plugin validation with connection and basic details for connectivity diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature index 6c68180f..98b6f689 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature @@ -14,7 +14,7 @@ # the License. # -@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +@Regression @Sink_Required Feature: CloudSQL-PostgreSQL sink- Verify CloudSQL-PostgreSQL sink plugin design time macro scenarios Scenario: To verify CloudSQLPostgreSQL sink plugin validation with macro enabled fields for connection section diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature index 0eda6558..482ac5cb 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature @@ -1,5 +1,5 @@ # -# Copyright © 2022 Cask Data, Inc. +# Copyright © 2023 Cask Data, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of @@ -14,7 +14,7 @@ # the License. 
# -@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +@Regression @Sink_Required Feature: CloudSQL-PostgreSQL Sink - Verify CloudSQL-postgreSQL Sink Plugin Error scenarios Scenario:Verify CloudSQLPostgreSQL sink plugin validation errors for mandatory fields diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature index 2216773c..1ee11cbe 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature @@ -14,7 +14,7 @@ # the License. # -@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +@Regression @Sink_Required Feature: CloudSQL-PostgreSQL sink - Verify data transfer from BigQuery source to CloudSQL-PostgreSQL sink @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1526 @@ -50,7 +50,7 @@ Feature: CloudSQL-PostgreSQL sink - Verify data transfer from BigQuery source to Then Save the pipeline Then Preview and run the pipeline Then Verify the preview of pipeline is "success" - Then Click on preview data for CloudSQLPostgreSQL sink + Then Click on the Preview Data link on the Sink plugin node: "CloudSQLPostgreSQL" Then Close the preview data Then Deploy the pipeline Then Run the Pipeline in Runtime @@ -93,7 +93,7 @@ Feature: CloudSQL-PostgreSQL sink - Verify data transfer from BigQuery source to Then Save the pipeline Then Preview and run the pipeline Then Verify the preview of pipeline is "success" - Then Click on preview data for CloudSQLPostgreSQL sink + Then Click on the Preview Data link on the Sink plugin node: "CloudSQLPostgreSQL" Then Close the preview data Then Deploy the pipeline Then Run the Pipeline in Runtime @@ -136,7 +136,7 @@ Feature: CloudSQL-PostgreSQL sink - Verify data transfer from BigQuery source to Then Save the pipeline Then Preview and run the pipeline Then Verify the preview of pipeline is "success" - Then Click on preview data for CloudSQLPostgreSQL sink + Then Click on the Preview Data link on the Sink plugin node: "CloudSQLPostgreSQL" Then Close the preview data Then Deploy the pipeline Then Run the Pipeline in Runtime diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature index cf6a89cb..5bb22887 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature @@ -14,7 +14,7 @@ # the License. # -@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +@Regression @Sink_Required Feature: CloudSQL-PostgreSQL sink - Verify data transfer to PostgreSQL sink with macro arguments @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1629 @PLUGIN-1526 diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature index fdedeaa7..4302bb91 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature @@ -14,7 +14,7 @@ # the License. 
# -@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +@Regression @Source_Required Feature: CloudSQL-PostgreSQL source - Verify CloudSQLPostgreSQL source plugin design time scenarios @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature index 25e7ac52..c35a4b99 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature @@ -14,7 +14,7 @@ # the License. # -@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +@Regression @Source_Required Feature: CloudSQL-PostgreSQL source - Verify CloudSQL-PostgreSQL source plugin design time macros scenarios Scenario: To verify CloudSQLPostgreSQL source plugin validation with macro enabled fields for connection section diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature index 907906b4..2ccf2a53 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature @@ -14,7 +14,7 @@ # the License. # -@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +@Regression @Source_Required Feature: CloudSQL-postgreSQL source - Verify CloudSQL-postgreSQL source plugin design time validation scenarios Scenario:Verify CloudSQLPostgreSQL source plugin validation errors for mandatory fields diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature index 770541b2..e4a898ca 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature @@ -14,7 +14,7 @@ # the License. # -@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +@Regression @Source_Required Feature: CloudSQL-PostGreSQL Source - Run Time scenarios @CLOUDSQLPOSTGRESQL_SOURCE_TEST @BQ_SINK_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @PLUGIN-1526 @@ -227,7 +227,7 @@ Feature: CloudSQL-PostGreSQL Source - Run Time scenarios Then Save the pipeline Then Preview and run the pipeline Then Verify the preview of pipeline is "success" - Then Click on preview data for CloudSQLPostgreSQL sink + Then Click on the Preview Data link on the Sink plugin node: "CloudSQLPostgreSQL" Then Close the preview data Then Deploy the pipeline Then Run the Pipeline in Runtime diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature index 9729ce1e..8c9fe1f5 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature @@ -14,7 +14,7 @@ # the License. 
 #
-@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required
+@Regression @Source_Required
 Feature: CloudSQL-PostGreSQL source - Verify CloudSQL-PostGreSQL plugin data transfer with macro arguments

   @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/CloudSqlPostgreSqlClient.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/CloudSqlPostgreSqlClient.java
new file mode 100644
index 00000000..291e1103
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/CloudSqlPostgreSqlClient.java
@@ -0,0 +1,169 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin;
+
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import org.junit.Assert;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.TimeZone;
+
+/**
+ * CloudSQLPostgreSQL client.
+ */
+
+public class CloudSqlPostgreSqlClient {
+  public static Connection getCloudSqlConnection() throws ClassNotFoundException, SQLException {
+    Class.forName("org.postgresql.Driver");
+    String database = PluginPropertyUtils.pluginProp("databaseName");
+    String instanceConnectionName = System.getenv("CLOUDSQL_POSTGRESQL_CONNECTION_NAME");
+    String username = System.getenv("CLOUDSQL_POSTGRESQL_USERNAME");
+    String password = System.getenv("CLOUDSQL_POSTGRESQL_PASSWORD");
+    String jdbcUrl = String.format(PluginPropertyUtils.pluginProp("URL"), database, instanceConnectionName, username, password);
+    Connection connection = DriverManager.getConnection(jdbcUrl);
+    return connection;
+  }
+
+  public static int countRecord(String table, String schema) throws SQLException, ClassNotFoundException {
+    String countQuery = "SELECT COUNT(*) as total FROM " + schema + "." + table;
+    try (Connection connection = getCloudSqlConnection();
+         Statement statement = connection.createStatement();
+         ResultSet rs = statement.executeQuery(countQuery)) {
+      int num = 0;
+      while (rs.next()) {
+        num = rs.getInt(1);
+      }
+      return num;
+    }
+  }
+
+  /**
+   * Extracts the result sets of the source and target tables.
+   * @param sourceTable table at the source side
+   * @param targetTable table at the sink side
+   * @return true if the values in source and target side are equal
+   */
+  public static boolean validateRecordValues(String sourceTable, String targetTable, String schema)
+    throws SQLException, ClassNotFoundException {
+    String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable;
+    String getTargetQuery = "SELECT * FROM " + schema + "."
+ targetTable;
+    try (Connection connection = getCloudSqlConnection()) {
+      connection.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement1 = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                        ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement2 = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                        ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      ResultSet rsSource = statement1.executeQuery(getSourceQuery);
+      ResultSet rsTarget = statement2.executeQuery(getTargetQuery);
+      return compareResultSetData(rsSource, rsTarget);
+    }
+  }
+
+  /**
+   * Compares the result set data of the source and target tables.
+   * @param rsSource result set of the source table data
+   * @param rsTarget result set of the target table data
+   * @return true if rsSource matches rsTarget
+   */
+  public static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarget) throws SQLException {
+    ResultSetMetaData mdSource = rsSource.getMetaData();
+    ResultSetMetaData mdTarget = rsTarget.getMetaData();
+    int columnCountSource = mdSource.getColumnCount();
+    int columnCountTarget = mdTarget.getColumnCount();
+    Assert.assertEquals("Number of columns in source and target are not equal",
+                        columnCountSource, columnCountTarget);
+    while (rsSource.next() && rsTarget.next()) {
+      int currentColumnCount = 1;
+      while (currentColumnCount <= columnCountSource) {
+        String columnTypeName = mdSource.getColumnTypeName(currentColumnCount);
+        int columnType = mdSource.getColumnType(currentColumnCount);
+        String columnName = mdSource.getColumnName(currentColumnCount);
+        if (columnType == Types.TIMESTAMP) {
+          GregorianCalendar gc = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
+          gc.setGregorianChange(new Date(Long.MIN_VALUE));
+          Timestamp sourceTS = rsSource.getTimestamp(currentColumnCount, gc);
+          Timestamp targetTS = rsTarget.getTimestamp(currentColumnCount, gc);
+          Assert.assertEquals(String.format("Different values found for column : %s", columnName), sourceTS, targetTS);
+        } else {
+          String sourceString = rsSource.getString(currentColumnCount);
+          String targetString = rsTarget.getString(currentColumnCount);
+          Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                              sourceString, targetString);
+        }
+        currentColumnCount++;
+      }
+    }
+    Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table",
+                       rsSource.next());
+    Assert.assertFalse("Number of rows in Target table is greater than the number of rows in Source table",
+                       rsTarget.next());
+    return true;
+  }
+
+  public static void createSourceTable(String sourceTable, String schema) throws SQLException, ClassNotFoundException {
+    try (Connection connection = getCloudSqlConnection();
+         Statement statement = connection.createStatement()) {
+      String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
+      String createSourceTableQuery = "CREATE TABLE " + schema + "." + sourceTable + datatypesColumns;
+      statement.executeUpdate(createSourceTableQuery);
+
+      // Insert dummy data.
+      String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValues");
+      String datatypesColumnsList = PluginPropertyUtils.pluginProp("datatypesColumnsList");
+      statement.executeUpdate("INSERT INTO " + schema + "."
+ sourceTable + " " + datatypesColumnsList + " " + + datatypesValues); + } + } + + public static void createTargetTable(String targetTable, String schema) throws SQLException, ClassNotFoundException { + try (Connection connection = getCloudSqlConnection(); + Statement statement = connection.createStatement()) { + String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns"); + String createTargetTableQuery = "CREATE TABLE " + schema + "." + targetTable + " " + datatypesColumns; + statement.executeUpdate(createTargetTableQuery); + } + } + + public static void createTargetPostgresqlTable(String targetTable, String schema) throws SQLException, + ClassNotFoundException { + try (Connection connection = getCloudSqlConnection(); + Statement statement = connection.createStatement()) { + String datatypesColumns = PluginPropertyUtils.pluginProp("bigQueryDatatypesColumns"); + String createTargetTableQuery = "CREATE TABLE " + schema + "." + targetTable + " " + datatypesColumns; + statement.executeUpdate(createTargetTableQuery); + } + } + + public static void dropTables(String[] tables, String schema) throws SQLException, ClassNotFoundException { + try (Connection connection = getCloudSqlConnection(); + Statement statement = connection.createStatement()) { + for (String table : tables) { + String dropTableQuery = "Drop Table " + schema + "." + table; + statement.executeUpdate(dropTableQuery); + } + } + } +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java index 86214b9b..f761dc82 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java @@ -21,6 +21,7 @@ import com.google.gson.JsonObject; import io.cdap.e2e.utils.BigQueryClient; import io.cdap.e2e.utils.PluginPropertyUtils; +import io.cdap.plugin.CloudSqlPostgreSqlClient; import org.apache.spark.sql.types.Decimal; import org.junit.Assert; @@ -28,6 +29,7 @@ import java.math.BigDecimal; import java.math.RoundingMode; import java.sql.*; +import java.text.SimpleDateFormat; import java.time.*; import java.util.Date; import java.text.ParseException; @@ -40,51 +42,52 @@ public class BQValidation { static List BigQueryResponse = new ArrayList<>(); static List bigQueryRows = new ArrayList<>(); + static Gson gson=new Gson(); /** * Extracts entire data from source and target tables. - * * @param sourceTable table at the source side * @param targetTable table at the sink side * @return true if the values in source and target side are equal */ public static boolean validateDBToBQRecordValues(String schema, String sourceTable, String targetTable) - throws SQLException, ClassNotFoundException, IOException, InterruptedException { + throws SQLException, ClassNotFoundException, IOException, InterruptedException, ParseException { getBigQueryTableData(targetTable, bigQueryRows); for (Object rows : bigQueryRows) { - JsonObject json = new Gson().fromJson(String.valueOf(rows), JsonObject.class); + JsonObject json = gson.fromJson(String.valueOf(rows), JsonObject.class); BigQueryResponse.add(json); } String getSourceQuery = "SELECT * FROM " + schema + "." 
+ sourceTable; - try (Connection connect = CloudSqlPostgreSqlClient.getCloudSqlConnection()) { - connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); - Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, + try (Connection connection = CloudSqlPostgreSqlClient.getCloudSqlConnection()) { + connection.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); + Statement statement1 = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, ResultSet.HOLD_CURSORS_OVER_COMMIT); ResultSet rsSource = statement1.executeQuery(getSourceQuery); return compareResultSetandJsonData(rsSource, BigQueryResponse); } } + public static boolean validateBQToDBRecordValues(String schema, String sourceTable, String targetTable) - throws SQLException, ClassNotFoundException, IOException, InterruptedException { + throws SQLException, ClassNotFoundException, IOException, InterruptedException, ParseException { getBigQueryTableData(sourceTable, bigQueryRows); for (Object rows : bigQueryRows) { - JsonObject json = new Gson().fromJson(String.valueOf(rows), JsonObject.class); + JsonObject json = gson.fromJson(String.valueOf(rows), JsonObject.class); BigQueryResponse.add(json); } String getTargetQuery = "SELECT * FROM " + schema + "." + targetTable; - try (Connection connect = CloudSqlPostgreSqlClient.getCloudSqlConnection()) { - connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); - Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, + try (Connection connection = CloudSqlPostgreSqlClient.getCloudSqlConnection()) { + connection.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); + Statement statement1 = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, ResultSet.HOLD_CURSORS_OVER_COMMIT); ResultSet rsTarget = statement1.executeQuery(getTargetQuery); return compareResultSetandJsonData(rsTarget, BigQueryResponse); } } + /** * Retrieves the data from a specified BigQuery table and populates it into the provided list of objects. - * * @param table The name of the BigQuery table to fetch data from. * @param bigQueryRows The list to store the fetched BigQuery data. */ @@ -100,7 +103,6 @@ private static void getBigQueryTableData(String table, List bigQueryRows /** * Compares the data in the result set obtained from the Oracle database with the provided BigQuery JSON objects. - * * @param rsSource The result set obtained from the Oracle database. * @param bigQueryData The list of BigQuery JSON objects to compare with the result set data. * @return True if the result set data matches the BigQuery data, false otherwise. @@ -108,7 +110,7 @@ private static void getBigQueryTableData(String table, List bigQueryRows * @throws ParseException If an error occurs while parsing the data. */ public static boolean compareResultSetandJsonData(ResultSet rsSource, List bigQueryData) - throws SQLException { + throws SQLException, ParseException { ResultSetMetaData mdSource = rsSource.getMetaData(); boolean result = false; int columnCountSource = mdSource.getColumnCount(); @@ -161,6 +163,11 @@ public static boolean compareResultSetandJsonData(ResultSet rsSource, List