diff --git a/src/e2e-test/features/bigquery/sink/BigQuerySinkError.feature b/src/e2e-test/features/bigquery/sink/BigQuerySinkError.feature
index dd4c331f2..c9f058ff4 100644
--- a/src/e2e-test/features/bigquery/sink/BigQuerySinkError.feature
+++ b/src/e2e-test/features/bigquery/sink/BigQuerySinkError.feature
@@ -62,3 +62,16 @@ Feature: BigQuery sink - Validate BigQuery sink plugin error scenarios
     Then Enter BigQuery sink property table name
     Then Enter BigQuery property temporary bucket name "bqInvalidTemporaryBucket"
     Then Verify the BigQuery validation error message for invalid property "bucket"
+
+  @BQ_SINK_TEST
+  Scenario: Verify BigQuery Sink properties validation errors for incorrect value of reference name
+    Given Open Datafusion Project to configure pipeline
+    When Sink is BigQuery
+    Then Open BigQuery sink properties
+    Then Enter BigQuery property reference name "bqInvalidRefName"
+    Then Enter BigQuery property projectId "projectId"
+    Then Enter BigQuery property datasetProjectId "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter BigQuery property dataset "dataset"
+    Then Enter BigQuery sink property table name
+    Then Verify the BigQuery validation error message for reference name "referenceName"
\ No newline at end of file
diff --git a/src/e2e-test/features/bigquery/sink/BigQueryToBigQuerySink.feature b/src/e2e-test/features/bigquery/sink/BigQueryToBigQuerySink.feature
index 6a1afdfcf..25311694e 100644
--- a/src/e2e-test/features/bigquery/sink/BigQueryToBigQuerySink.feature
+++ b/src/e2e-test/features/bigquery/sink/BigQueryToBigQuerySink.feature
@@ -345,3 +345,449 @@ Feature: BigQuery sink - Verification of BigQuery to BigQuery successful data tr
     Then Close the pipeline logs
     Then Verify the pipeline status is "Succeeded"
     Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table
+
+  @BQ_INSERT_SOURCE_TEST @BQ_SINK_TEST
+  Scenario: Validate successful records transfer from BigQuery to BigQuery with Advanced operation Upsert
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    And Replace input plugin property: "project" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    And Replace input plugin property: "datasetProject" with value: "datasetprojectId"
+    And Replace input plugin property: "referenceName" with value: "reference"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    And Replace input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    And Select radio button plugin property: "operation" with value: "upsert"
+    Then Click plugin property: "updateTableSchema"
+    Then Click on the Add Button of the property: "relationTableKey" with value:
+      | TableKey |
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Close the pipeline logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table
+  @BQ_INSERT_SOURCE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION @BigQuery_Sink_Required @ITN_TEST
+  Scenario: Verify and validate a BigQuery source to BigQuery sink end to end test case with Time Partitioning Type Daily
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Click plugin property: "switch-useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Click on the Browse button inside plugin properties
+    Then Select connection data row with name: "dataset"
+    Then Select connection data row with name: "bqSourceTable"
+    Then Wait till connection data loading completes with a timeout of 60 seconds
+    Then Verify input plugin property: "dataset" contains value: "dataset"
+    Then Verify input plugin property: "table" contains value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
+    Then Click on the Browse button inside plugin properties
+    Then Click SELECT button inside connection data row with name: "dataset"
+    Then Wait till connection data loading completes with a timeout of 60 seconds
+    Then Verify input plugin property: "dataset" contains value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    And Select radio button plugin property: "operation" with value: "upsert"
+    And Select radio button plugin property: "timePartitioningType" with value: "DAY"
+    Then Click on the Add Button of the property: "relationTableKey" with value:
+      | TableKey |
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Close the pipeline logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table
+
+  @BQ_INSERT_SOURCE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION @BigQuery_Sink_Required @ITN_TEST
+  Scenario: Verify and validate a BigQuery source to BigQuery sink end to end test case with Time Partitioning Type Hourly
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Click plugin property: "switch-useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Click on the Browse button inside plugin properties
+    Then Select connection data row with name: "dataset"
+    Then Select connection data row with name: "bqSourceTable"
+    Then Wait till connection data loading completes with a timeout of 60 seconds
+    Then Verify input plugin property: "dataset" contains value: "dataset"
+    Then Verify input plugin property: "table" contains value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
+    Then Click on the Browse button inside plugin properties
+    Then Click SELECT button inside connection data row with name: "dataset"
+    Then Wait till connection data loading completes with a timeout of 60 seconds
+    Then Verify input plugin property: "dataset" contains value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    And Select radio button plugin property: "operation" with value: "upsert"
+    And Select radio button plugin property: "timePartitioningType" with value: "HOUR"
+    Then Click on the Add Button of the property: "relationTableKey" with value:
+      | TableKey |
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Close the pipeline logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table
+
+  @BQ_INSERT_SOURCE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION @BigQuery_Sink_Required @ITN_TEST
+  Scenario: Verify and validate a BigQuery source to BigQuery sink end to end test case with Time Partitioning Type Monthly
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Click plugin property: "switch-useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Click on the Browse button inside plugin properties
+    Then Select connection data row with name: "dataset"
+    Then Select connection data row with name: "bqSourceTable"
+    Then Wait till connection data loading completes with a timeout of 60 seconds
+    Then Verify input plugin property: "dataset" contains value: "dataset"
+    Then Verify input plugin property: "table" contains value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
+    Then Click on the Browse button inside plugin properties
+    Then Click SELECT button inside connection data row with name: "dataset"
+    Then Wait till connection data loading completes with a timeout of 60 seconds
+    Then Verify input plugin property: "dataset" contains value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    And Select radio button plugin property: "operation" with value: "upsert"
+    And Select radio button plugin property: "timePartitioningType" with value: "MONTH"
+    Then Click on the Add Button of the property: "relationTableKey" with value:
+      | TableKey |
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Close the pipeline logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table
+
+  @BQ_INSERT_SOURCE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION @BigQuery_Sink_Required @ITN_TEST
+  Scenario: Verify and validate a BigQuery source to BigQuery sink end to end test case with Time Partitioning Type Yearly
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Click plugin property: "switch-useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Click on the Browse button inside plugin properties
+    Then Select connection data row with name: "dataset"
+    Then Select connection data row with name: "bqSourceTable"
+    Then Wait till connection data loading completes with a timeout of 60 seconds
+    Then Verify input plugin property: "dataset" contains value: "dataset"
+    Then Verify input plugin property: "table" contains value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
+    Then Click on the Browse button inside plugin properties
+    Then Click SELECT button inside connection data row with name: "dataset"
+    Then Wait till connection data loading completes with a timeout of 60 seconds
+    Then Verify input plugin property: "dataset" contains value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    And Select radio button plugin property: "operation" with value: "upsert"
+    And Select radio button plugin property: "timePartitioningType" with value: "YEAR"
+    Then Click on the Add Button of the property: "relationTableKey" with value:
+      | TableKey |
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Close the pipeline logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table
+
+  @BQ_SOURCE_DATATYPE_TEST @BQ_SINK_TEST
+  Scenario: Validate successful records transfer from BigQuery to BigQuery with JSON string fields
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    And Replace input plugin property: "project" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    And Enter input plugin property: "datasetProject" with value: "datasetprojectId"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    And Replace input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Click on the Add Button of the property: "jsonStringFields" with value:
+      | jsonStringValue |
+    Then Validate "BigQuery" plugin properties
+    Then Close the BigQuery properties
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
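+
+  # "jsonStringValue" in the data tables below resolves to transaction_uid.field in
+  # pluginParameters.properties; the jsonStringFields property lists the output fields the
+  # sink treats as JSON strings.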
+  @BQ_INSERT_SOURCE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION @BigQuery_Sink_Required @ITN_TEST
+  Scenario: Validate successful records transfer from BigQuery to BigQuery with Upsert operation and JSON string fields using existing connection
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Click plugin property: "switch-useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Click on the Browse button inside plugin properties
+    Then Select connection data row with name: "dataset"
+    Then Select connection data row with name: "bqSourceTable"
+    Then Wait till connection data loading completes with a timeout of 60 seconds
+    Then Verify input plugin property: "dataset" contains value: "dataset"
+    Then Verify input plugin property: "table" contains value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
+    Then Click on the Browse button inside plugin properties
+    Then Click SELECT button inside connection data row with name: "dataset"
+    Then Wait till connection data loading completes with a timeout of 60 seconds
+    Then Verify input plugin property: "dataset" contains value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    And Select radio button plugin property: "operation" with value: "upsert"
+    And Select radio button plugin property: "timePartitioningType" with value: "YEAR"
+    Then Click on the Add Button of the property: "relationTableKey" with value:
+      | TableKey |
+    Then Click on the Add Button of the property: "jsonStringFields" with value:
+      | jsonStringValue |
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Close the pipeline logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table
+
+  @BQ_SOURCE_DATATYPE_TEST @BQ_SINK_TEST
+  Scenario: Validate successful records transfer from BigQuery to BigQuery with JSON string fields and partition field
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "datasetProject" with value: "projectId"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    Then Override Service account details if set in environment variables
+    And Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Click plugin property: "truncateTable"
+    Then Click plugin property: "updateTableSchema"
+    Then Click on the Add Button of the property: "jsonStringFields" with value:
+      | jsonStringValue |
+    Then Enter BigQuery sink property partition field "bqPartitionFieldTime"
+    Then Validate "BigQuery" plugin properties
+    Then Close the BigQuery properties
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Verify the partition table is created with partitioned on field "bqPartitionFieldTime"
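+
+  # "jobLabelKey" and "jobLabelValue" resolve to entries in pluginParameters.properties and are
+  # added to the "jobLabels" property so the BigQuery jobs submitted by the sink carry the
+  # corresponding labels.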
+  @BQ_SOURCE_DATATYPE_TEST @BQ_SINK_TEST
+  Scenario: Validate successful records transfer from BigQuery to BigQuery with BQ Job Labels key and value pairs
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "datasetProject" with value: "projectId"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    Then Override Service account details if set in environment variables
+    And Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Click plugin property: "truncateTable"
+    Then Click plugin property: "updateTableSchema"
+    Then Click on the Add Button of the property: "jobLabels" with value:
+      | jobLabelKey |
+    Then Click on the Add Button of the property: "jobLabels" with value:
+      | jobLabelValue |
+    Then Enter BigQuery sink property partition field "bqPartitionFieldTime"
+    Then Validate "BigQuery" plugin properties
+    Then Close the BigQuery properties
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Verify the partition table is created with partitioned on field "bqPartitionFieldTime"
+
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java
index d4ae865c8..d3701adda 100644
--- a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java
+++ b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java
@@ -260,4 +260,163 @@ public void validateRecordsTransferredToTargetTableIsEqualToNumberOfRecordsFromS
     BeforeActions.scenario.write("Number of records transferred from source table to target table:" + count);
     Assert.assertEquals(count, countRecordsTarget);
   }
+
+  @Then("Enter BigQuery source properties partitionFrom and partitionTo")
+  public void enterBigQuerySourcePropertiesPartitionFromAndPartitionTo() throws IOException {
+    // Enters today's date as partition start and tomorrow's date as partition end, in dd-MM-yyyy format.
+    CdfBigQueryPropertiesActions.enterPartitionStartDate(new SimpleDateFormat("dd-MM-yyyy").format(new Date()));
+    CdfBigQueryPropertiesActions.enterPartitionEndDate(new SimpleDateFormat("dd-MM-yyyy")
+                                                         .format(DateUtils.addDays(new Date(), 1)));
+  }
+
+  @Then("Validate BigQuery source incorrect property error for Partition Start date {string} value {string}")
+  public void validateBigQuerySourceIncorrectPartitionStartDateErrorFor(String property, String value) {
+    CdfBigQueryPropertiesActions.getSchema();
+    SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L);
+    String tableFullName = StringUtils.EMPTY;
+    if (property.equalsIgnoreCase("dataset")) {
+      tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp(value)
+        + "." + TestSetupHooks.bqSourceTable;
+    } else if (property.equalsIgnoreCase("table")) {
+      tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":"
+        + PluginPropertyUtils.pluginProp("dataset")
+        + "." + PluginPropertyUtils.pluginProp(value);
+    } else if (property.equalsIgnoreCase("datasetProject")) {
+      tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset")
+        + "." + TestSetupHooks.bqSourceTable;
+    } else if (property.equalsIgnoreCase("partitionFrom")) {
+      tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":"
+        + PluginPropertyUtils.pluginProp("dataset")
+        + "." + PluginPropertyUtils.pluginProp(value);
+    }
+    String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_PARTITIONSTARTDATE)
+      .replaceAll("TABLENAME", tableFullName);
+    String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("partitionFrom").getText();
+    BeforeActions.scenario.write("Actual error message: " + actualErrorMessage);
+    Assert.assertEquals("Error message mismatch for Partition Start Date", expectedErrorMessage, actualErrorMessage);
+    String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement("partitionFrom"));
+    String expectedColor = ConstantsUtil.ERROR_MSG_COLOR;
+    Assert.assertEquals(expectedColor, actualColor);
+  }
+
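+  /**
+   * Asserts the inline error shown for Partition End Date, mirroring the start-date check above:
+   * the expected text comes from errorMessage.properties with TABLENAME substituted from plugin properties.
+   */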
+  @Then("Validate BigQuery source incorrect property error for Partition End date {string} value {string}")
+  public void validateBigQuerySourceIncorrectPartitionEndDateErrorFor(String property, String value) {
+    CdfBigQueryPropertiesActions.getSchema();
+    SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L);
+    String tableFullName = StringUtils.EMPTY;
+    if (property.equalsIgnoreCase("dataset")) {
+      tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp(value)
+        + "." + TestSetupHooks.bqSourceTable;
+    } else if (property.equalsIgnoreCase("table")) {
+      tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":"
+        + PluginPropertyUtils.pluginProp("dataset")
+        + "." + PluginPropertyUtils.pluginProp(value);
+    } else if (property.equalsIgnoreCase("datasetProjectId")) {
+      tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset")
+        + "." + TestSetupHooks.bqSourceTable;
+    } else if (property.equalsIgnoreCase("partitionEndDate")) {
+      tableFullName = PluginPropertyUtils.pluginProp(value) + ":"
+        + PluginPropertyUtils.pluginProp("partitionTo")
+        + "." + TestSetupHooks.bqSourceTable;
+    }
+    String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_PARTITIONENDDATE)
+      .replaceAll("TABLENAME", tableFullName);
+    String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("partitionTo").getText();
+    BeforeActions.scenario.write("Actual error message: " + actualErrorMessage);
+    Assert.assertEquals("Error message mismatch for Partition End Date", expectedErrorMessage, actualErrorMessage);
+    String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement("partitionTo"));
+    String expectedColor = ConstantsUtil.ERROR_MSG_COLOR;
+    Assert.assertEquals(expectedColor, actualColor);
+  }
+
+  @Then("Enter BigQuery source properties referenceName")
+  public void enterBigQuerySourcePropertiesReferenceName() throws IOException {
+    // Enters an intentionally invalid reference name to trigger validation.
+    CdfBigQueryPropertiesActions.enterBigQueryReferenceName("#$%^");
+  }
+
+  @Then("Validate BigQuery source incorrect property error for reference name {string} value {string}")
+  public void validateBigQuerySourceIncorrectPropertyErrorForReferenceName(String property, String value) {
+    CdfBigQueryPropertiesActions.getSchema();
+    SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L);
+    String tableFullName = StringUtils.EMPTY;
+    if (property.equalsIgnoreCase("dataset")) {
+      tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp(value)
+        + "." + TestSetupHooks.bqSourceTable;
+    } else if (property.equalsIgnoreCase("table")) {
+      tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":"
+        + PluginPropertyUtils.pluginProp("dataset")
+        + "." + PluginPropertyUtils.pluginProp(value);
+    } else if (property.equalsIgnoreCase("datasetProject")) {
+      tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset")
+        + "." + TestSetupHooks.bqSourceTable;
+    } else if (property.equalsIgnoreCase("referenceName")) {
+      tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("reference")
+        + "." + TestSetupHooks.bqSourceTable;
+    }
+    String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_REFERENCENAME)
+      .replaceAll("TABLENAME", tableFullName);
+    String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("referenceName").getText();
+    Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+    String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement("referenceName"));
+    String expectedColor = ConstantsUtil.ERROR_MSG_COLOR;
+    Assert.assertEquals(expectedColor, actualColor);
+  }
+
+  @Then("Enter BigQuery source properties filter")
+  public void enterBigQuerySourcePropertiesFilter() throws IOException {
+    // Enters an intentionally invalid filter expression to trigger validation.
+    CdfBigQueryPropertiesActions.enterFilter("%%%%");
+  }
+
+  @Then("Enter BigQuery source property output schema {string} as macro argument {string}")
+  public void enterBigQueryPropertyOutputSchemaAsMacroArgument(String pluginProperty, String macroArgument) {
+    SCHEMA_LOCATORS.schemaActions.click();
+    SCHEMA_LOCATORS.schemaActionType("macro").click();
+    WaitHelper.waitForElementToBeHidden(SCHEMA_LOCATORS.schemaActionType("macro"), 5);
+    try {
+      enterMacro(CdfPluginPropertyLocator.fromPropertyString(pluginProperty).pluginProperty, macroArgument);
+    } catch (NullPointerException e) {
+      Assert.fail("CDF_PLUGIN_PROPERTY_MAPPING for '" + pluginProperty + "' not present in CdfPluginPropertyLocator.");
+    }
+  }
+
+  @Then("Enter BigQuery property reference name {string}")
+  public void enterBigQueryPropertyReferenceName(String referenceName) throws IOException {
+    CdfBigQueryPropertiesActions.enterBigQueryReferenceName(PluginPropertyUtils.pluginProp(referenceName));
+  }
+
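+  /**
+   * Clicks Validate, maps the failing property to its expected inline error from
+   * errorMessage.properties, and asserts both the message text and its colour under the
+   * reference name field.
+   */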
+ replaceAll("referenceName", property.substring(0, 1).toUpperCase() + property.substring(1)); + } + String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("referenceName").getText(); + System.out.println(actualErrorMessage); + Assert.assertEquals(expectedErrorMessage, actualErrorMessage); + String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement("referenceName")); + String expectedColor = ConstantsUtil.ERROR_MSG_COLOR; + Assert.assertEquals(expectedColor, actualColor); + } + } diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQuerySink.java b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQuerySink.java index 940ff7b89..251ab2ede 100644 --- a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQuerySink.java +++ b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQuerySink.java @@ -24,8 +24,11 @@ import io.cdap.plugin.utils.E2EHelper; import io.cucumber.java.en.Then; import io.cucumber.java.en.When; +import org.apache.commons.lang3.time.DateUtils; import java.io.IOException; +import java.text.SimpleDateFormat; +import java.util.Date; import java.util.UUID; /** @@ -54,8 +57,14 @@ public void toggleBigQuerySinkPropertyTruncateTableToTrue() { CdfBigQueryPropertiesActions.clickTruncatableSwitch(); } + @Then("Toggle BigQuery sink property reuire partition filter to true") + public void toggleBigQuerysinkpropertyreuirepartitionfiltertotrue() { + CdfBigQueryPropertiesActions.toggleRequirePartitionFilter(); + } + @Then("Toggle BigQuery sink property updateTableSchema to true") public void toggleBigQuerySinkPropertyUpdateTableSchemaToTrue() { + CdfBigQueryPropertiesActions.clickUpdateTable(); } diff --git a/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java b/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java index 4fb86da3b..8d037e5a9 100644 --- a/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java +++ b/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java @@ -17,4 +17,9 @@ public class E2ETestConstants { public static final String ERROR_MSG_BQ_INCORRECT_CHUNKSIZE = "errorMessageIncorrectBQChunkSize"; public static final String ERROR_MSG_BQ_INCORRECT_TEMPORARY_BUCKET = "errorMessageIncorrectBQBucketName"; public static final String ERROR_MSG_BQ_INCORRECT_PROPERTY = "errorMessageIncorrectBQProperty"; + public static final String ERROR_MSG_INCORRECT_PARTITIONSTARTDATE= "errorMessageIncorrectPartitionStartDate"; + public static final String ERROR_MSG_INCORRECT_PARTITIONENDDATE= "errorMessageIncorrectPartitionEndDate"; + public static final String ERROR_MSG_INCORRECT_REFERENCENAME= "errorMessageInvalidReferenceName"; + public static final String ERROR_MSG_INCORRECT_FILTER= "errorMessageIncorrectRegexPathFilter"; + public static final String JSON_STRING_VALUE ="jsonStringValue"; } diff --git a/src/e2e-test/resources/errorMessage.properties b/src/e2e-test/resources/errorMessage.properties index bd8a1610b..14d73c4ce 100644 --- a/src/e2e-test/resources/errorMessage.properties +++ b/src/e2e-test/resources/errorMessage.properties @@ -33,4 +33,8 @@ errorMessageMultipleFileWithoutClearDefaultSchema=Found a row with 4 fields when errorMessageInvalidSourcePath=Invalid bucket name in path 'abc@'. Bucket name should errorMessageInvalidDestPath=Invalid bucket name in path 'abc@'. 
+errorMessageIncorrectPartitionStartDate=08-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
+errorMessageIncorrectPartitionEndDate=09-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
+errorMessageInvalidReferenceName=Invalid reference name 'invalidRef&^*&&*'. Supported characters are: letters, numbers, and '_', '-', '.', or '$'.
+errorLogsMessageInvalidFilter=Spark Program 'phase-1' failed.
diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties
index aae33e0e8..0ebe1fd5a 100644
--- a/src/e2e-test/resources/pluginParameters.properties
+++ b/src/e2e-test/resources/pluginParameters.properties
@@ -179,6 +179,9 @@ bqFuturePartitionEndDate=2099-10-02
 bqTruncateTableTrue=True
 bqUpdateTableSchemaTrue=True
 clusterValue=transaction_date
+jsonStringValue=transaction_uid.field
+jobLabelKey=transaction_uid
+jobLabelValue=transaction_uid:redis
 TableKey=PersonID
 bqSourceTable=dummy
 bqCreateTableQueryFile=testdata/BigQuery/BigQueryCreateTableQuery.txt
@@ -193,6 +196,7 @@ bqSourceSchemaDatatype=[{"key":"transaction_info","value":"boolean"},{"key":"tra
 {"key":"difference","value":"array"},{"key":"Userdata","value":"record"}]
 bqPartitionField=Month_of_Joining
 bqPartitionFieldTime=transaction_date
+partitionFilter=Customer_Exists
 bqRangeStart=1
 bqRangeEnd=10
 bqRangeInterval=2
@@ -214,7 +218,7 @@ bqUpdateDedupeExpectedFile=testdata/BigQuery/BQUpdateDedupeFile
 bqInsertExpectedFile=testdata/BigQuery/BQInsertIntFile
 relationTableKeyValue=Name
 dedupeByOrder=ASC
-dedupeByValue=ID
+dedupeByValue=transaction_time
 dedupeByValueUpsert=Price
 rangeStartValue=2
 rangeEndValue=3
@@ -231,7 +235,7 @@ bqDifferentRecordFile=testdata/BigQuery/BQDifferentRecordNameFile
 bqDateExpectedFile=testdata/BigQuery/BQDateFile
 bqDateTimeExpectedFile=testdata/BigQuery/BQDateTimeFile
 bqTimeStampExpectedFile=testdata/BigQuery/BQTimeStampFile
-bqPartitionFieldDate=transaction_date
+bqPartitionFieldDate=transaction_date,transaction_time
 ## BIGQUERY-PLUGIN-PROPERTIES-END
 
 ## PUBSUBSINK-PLUGIN-PROPERTIES-START
@@ -243,6 +247,7 @@ pubSubErrorThreshold=0
 pubSubStringValue=one
 pubSubNegativeValue=-100
 pubsubDelimiter=@
+JsonString=name.first
 ## PUBSUBSINK-PLUGIN-PROPERTIES-END
 
 ## GCSDELETE-PLUGIN-PROPERTIES-START
@@ -331,7 +336,8 @@ bqExecuteDMLUpsert=MERGE `PROJECT_NAME.DATASET.TABLENAME` as T \
   UPDATE SET UID = 'UPDATED RECORD' \
   WHEN NOT MATCHED THEN \
   INSERT ROW
-bqExecuteCountDMLUpsertInsert=SELECT COUNT(*) FROM `PROJECT_NAME.DATASET.TABLENAME` WHERE Id=101 AND Value=5000 AND UID='INSERTED RECORD'
+bqExecuteCountDMLUpsertInsert=SELECT COUNT(*) FROM `PROJECT_NAME.DATASET\
+  .TABLENAME` WHERE Id=101 AND Value=5000 AND UID='INSERTED RECORD'
 bqExecuteCountDMLUpsertUpdate=SELECT COUNT(*) FROM `PROJECT_NAME.DATASET.TABLENAME` WHERE Id=101 AND Value=5000 AND UID='UPDATED RECORD'
 bqExecuteInsertFile=testdata/BQExecute/BQExecuteInsertFile
 ## BQEXECUTE-PLUGIN-PROPERTIES-END