diff --git a/oracle-plugin/src/e2e-test/features/sink/OracleDesignTimeValidation.feature b/oracle-plugin/src/e2e-test/features/sink/OracleDesignTimeValidation.feature index 5b69fbbb2..d9cb71e38 100644 --- a/oracle-plugin/src/e2e-test/features/sink/OracleDesignTimeValidation.feature +++ b/oracle-plugin/src/e2e-test/features/sink/OracleDesignTimeValidation.feature @@ -262,3 +262,51 @@ Feature: Oracle sink- Verify Oracle sink plugin design time validation scenarios Then Click on the Validate button Then Verify that the Plugin is displaying an error message: "blank.HostBlank.message" on the header + @ORACLE_SOURCE_DATATYPES_TEST @ORACLE_TARGET_DATATYPES_TEST @Oracle_Required + Scenario Outline: To verify Oracle sink plugin validation error message for update, upsert operation name and table key + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "Oracle" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "Oracle" from the plugins list as: "Sink" + Then Connect plugins: "Oracle" and "Oracle2" to establish connection + Then Navigate to the properties page of plugin: "Oracle" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Enter input plugin property: 
"referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputDatatypesSchema" + Then Validate "Oracle" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "Oracle2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Select radio button plugin property: 
"operationName" with value: "" + Then Click on the Validate button + Then Verify that the Plugin Property: "operationName" is displaying an in-line error message: "errorMessageUpdateUpsertOperationName" + Then Verify that the Plugin Property: "relationTableKey" is displaying an in-line error message: "errorMessageUpdateUpsertOperationName" + Examples: + | options | + | upsert | + | update | diff --git a/oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature b/oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature index 8bd0cd536..7faae82a4 100644 --- a/oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature +++ b/oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature @@ -272,3 +272,111 @@ Feature: Oracle - Verify data transfer from BigQuery source to Oracle sink Then Verify the pipeline status is "Succeeded" Then Validate records transferred to target table with record counts of BigQuery table Then Validate the values of records transferred to target Oracle table is equal to the values from source BigQuery table + + @ORACLE_SOURCE_DATATYPES_TEST @ORACLE_TARGET_DATATYPES_TEST @Oracle_Required + Scenario Outline: To verify pipeline preview failed with invalid table key + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "Oracle" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "Oracle" from the plugins list as: "Sink" + Then Connect plugins: "Oracle" and "Oracle2" to establish connection + Then Navigate to the properties page of plugin: "Oracle" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input 
plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputDatatypesSchema" + Then Validate "Oracle" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "Oracle2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related 
fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Select radio button plugin property: "operationName" with value: "<options>" + Then Click on the Add Button of the property: "relationTableKey" with value: + | invalidOracleTableKey | + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "Failed" + Examples: + | options | + | upsert | + | update | + + @BQ_SOURCE_TEST @ORACLE_TEST_TABLE @Oracle_Required + Scenario Outline: To verify data is getting transferred from BigQuery to Oracle successfully using upsert,update operation with table key + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "Oracle" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "Oracle" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputDatatypesSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page 
of plugin: "Oracle" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Select radio button plugin property: "operationName" with value: "" + Then Click on the Add Button of the property: "relationTableKey" with value: + | oracleTableKey | + Then Validate "Oracle" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on the Preview Data link on the Sink plugin node: "Oracle" + Then Close the preview data + Then Deploy the 
pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target Oracle table is equal to the values from source BigQuery table + Examples: + | options | + | upsert | + | update | diff --git a/oracle-plugin/src/e2e-test/features/sink/OracleRunTimeMacro.feature b/oracle-plugin/src/e2e-test/features/sink/OracleRunTimeMacro.feature index 218a1a29e..7727a23cb 100644 --- a/oracle-plugin/src/e2e-test/features/sink/OracleRunTimeMacro.feature +++ b/oracle-plugin/src/e2e-test/features/sink/OracleRunTimeMacro.feature @@ -90,7 +90,7 @@ Feature: Oracle - Verify data transfer to Oracle sink with macro arguments Then Validate the values of records transferred to target Oracle table is equal to the values from source BigQuery table @ORACLE_SOURCE_DATATYPES_TEST @ORACLE_TARGET_DATATYPES_TEST @Oracle_Required - Scenario: To verify data is getting transferred from Oracle to Oracle successfully with connection argument, transaction isolation macro enabled + Scenario: To verify data is getting transferred from Oracle to Oracle successfully with connection argument, transaction isolation, operationName macro enabled Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "Oracle" from the plugins list as: "Source" @@ -126,12 +126,16 @@ Feature: Oracle - Verify data transfer to Oracle sink with macro arguments Then Select radio button plugin property: "role" with value: "normal" Then Click on the Macro button of Property: "connectionArguments" and set the value to: "connArgumentsSink" Then Click on the Macro button of Property: "transactionIsolationLevel" and set the value to: "transactionIsolationLevel" + Then Click on the Macro button of Property: "operationName" and set the value to: "oracleOperationName" + Then Click on the Macro button of Property: 
"relationTableKey" and set the value to: "oracleTableKey" Then Validate "Oracle2" plugin properties Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline Then Enter runtime argument value "connectionArguments" for key "connArgumentsSink" Then Enter runtime argument value "transactionIsolationLevel" for key "transactionIsolationLevel" + Then Enter runtime argument value "upsertOperationName" for key "oracleOperationName" + Then Enter runtime argument value "upsertRelationTableKey" for key "oracleTableKey" Then Run the preview of pipeline with runtime arguments Then Wait till pipeline preview is in running state Then Open and capture pipeline preview logs @@ -142,6 +146,8 @@ Feature: Oracle - Verify data transfer to Oracle sink with macro arguments Then Run the Pipeline in Runtime Then Enter runtime argument value "connectionArguments" for key "connArgumentsSink" Then Enter runtime argument value "transactionIsolationLevel" for key "transactionIsolationLevel" + Then Enter runtime argument value "upsertOperationName" for key "oracleOperationName" + Then Enter runtime argument value "upsertRelationTableKey" for key "oracleTableKey" Then Run the Pipeline in Runtime with runtime arguments Then Wait till pipeline is in running state Then Open and capture logs @@ -149,3 +155,79 @@ Feature: Oracle - Verify data transfer to Oracle sink with macro arguments Then Close the pipeline logs Then Validate the values of records transferred to target table is equal to the values from source table + @BQ_SOURCE_TEST @ORACLE_TEST_TABLE @Oracle_Required + Scenario: To verify data is getting transferred from BigQuery source to Oracle sink using macro arguments for operation name + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "Oracle" 
from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "Oracle" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "Oracle" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "oracleDriverName" + Then Click on the Macro button of Property: "host" and set the value to: "oracleHost" + Then Click on the Macro button of Property: "port" and set the value to: "oraclePort" + Then Click on the Macro button of Property: "user" and set the value to: "oracleUsername" + Then Click on the Macro button of Property: "password" and set the value to: "oraclePassword" + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Click on the Macro button of Property: "database" and set the value to: "oracleDatabaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "oracleTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "oracleSchemaName" + Then Click on the Macro button of Property: "operationName" and set the value to: "oracleOperationName" + Then Click on the Macro button of Property: "relationTableKey" and set the value to: "oracleTableKey" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Validate 
"Oracle" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "driverName" for key "oracleDriverName" + Then Enter runtime argument value from environment variable "host" for key "oracleHost" + Then Enter runtime argument value from environment variable "port" for key "oraclePort" + Then Enter runtime argument value from environment variable "username" for key "oracleUsername" + Then Enter runtime argument value from environment variable "password" for key "oraclePassword" + Then Enter runtime argument value "databaseName" for key "oracleDatabaseName" + Then Enter runtime argument value "targetTable" for key "oracleTableName" + Then Enter runtime argument value "schema" for key "oracleSchemaName" + Then Enter runtime argument value "operationName" for key "oracleOperationName" + Then Enter runtime argument value "relationTableKey" for key "oracleTableKey" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "driverName" for key "oracleDriverName" + Then Enter runtime argument value 
from environment variable "host" for key "oracleHost" + Then Enter runtime argument value from environment variable "port" for key "oraclePort" + Then Enter runtime argument value from environment variable "username" for key "oracleUsername" + Then Enter runtime argument value from environment variable "password" for key "oraclePassword" + Then Enter runtime argument value "databaseName" for key "oracleDatabaseName" + Then Enter runtime argument value "targetTable" for key "oracleTableName" + Then Enter runtime argument value "schema" for key "oracleSchemaName" + Then Enter runtime argument value "operationName" for key "oracleOperationName" + Then Enter runtime argument value "relationTableKey" for key "oracleTableKey" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Oracle table is equal to the values from source BigQuery table diff --git a/oracle-plugin/src/e2e-test/resources/errorMessage.properties b/oracle-plugin/src/e2e-test/resources/errorMessage.properties index 895444408..10d147773 100644 --- a/oracle-plugin/src/e2e-test/resources/errorMessage.properties +++ b/oracle-plugin/src/e2e-test/resources/errorMessage.properties @@ -19,3 +19,4 @@ errorLogsMessageInvalidBoundingQuery=Spark program 'phase-1' failed with error: blank.database.message=Required property 'database' has no value. blank.connection.message=Error encountered while configuring the stage: 'SQL Error occurred, sqlState: '72000', errorCode: '1005', errorMessage: SQL Exception occurred: [Message='ORA-01005: null password given; logon denied ', SQLState='72000', ErrorCode='1005'].' 
blank.HostBlank.message=Error encountered while configuring the stage: 'SQL Error occurred, sqlState: '08006', errorCode: '17002', errorMessage: SQL Exception occurred: [Message='IO Error: The Network Adapter could not establish the connection', SQLState='08006', ErrorCode='17002'].' +errorMessageUpdateUpsertOperationName=Table key must be set if the operation is 'Update' or 'Upsert'. diff --git a/oracle-plugin/src/e2e-test/resources/pluginParameters.properties b/oracle-plugin/src/e2e-test/resources/pluginParameters.properties index e3f81f1bc..55dba1533 100644 --- a/oracle-plugin/src/e2e-test/resources/pluginParameters.properties +++ b/oracle-plugin/src/e2e-test/resources/pluginParameters.properties @@ -86,6 +86,7 @@ invalidPassword=testPassword invalidBoundingQuery=SELECT MIN(id),MAX(id) FROM table invalidBoundingQueryValue=select; invalidTable=table +invalidOracleTableKey=asdas #ORACLE Valid Properties connectionArgumentsList=[{"key":"queryTimeout","value":"-1"}] @@ -97,6 +98,11 @@ splitByColumn=ID importQuery=where $CONDITIONS connectionArguments=queryTimeout=50 transactionIsolationLevel=TRANSACTION_READ_COMMITTED +operationName=update +oracleTableKey=LASTNAME +relationTableKey=ID +upsertOperationName=upsert +upsertRelationTableKey=COL2 #bq properties projectId=cdf-athena