Skip to content

Commit 4e33952

Browse files
Add e2e test case for small case schema.
Add e2e test case for small case schema.
1 parent e966109 commit 4e33952

File tree

7 files changed

+103
-6
lines changed

7 files changed

+103
-6
lines changed

oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature

+50
Original file line numberDiff line numberDiff line change
@@ -117,3 +117,53 @@ Feature: Oracle - Verify data transfer from BigQuery source to Oracle sink
117117
Then Verify the pipeline status is "Succeeded"
118118
Then Validate records transferred to target table with record counts of BigQuery table
119119
Then Validate the values of records transferred to target Oracle table is equal to the values from source BigQuery table
120+
121+
@BQ_SOURCE_TEST_SMALL_CASE @ORACLE_TEST_TABLE
122+
Scenario: To verify data is getting transferred from BigQuery source to Oracle sink successfully when schema is coming in small case
123+
Given Open Datafusion Project to configure pipeline
124+
When Expand Plugin group in the LHS plugins list: "Source"
125+
When Select plugin: "BigQuery" from the plugins list as: "Source"
126+
When Expand Plugin group in the LHS plugins list: "Sink"
127+
When Select plugin: "Oracle" from the plugins list as: "Sink"
128+
Then Connect plugins: "BigQuery" and "Oracle" to establish connection
129+
Then Navigate to the properties page of plugin: "BigQuery"
130+
Then Replace input plugin property: "project" with value: "projectId"
131+
Then Enter input plugin property: "datasetProject" with value: "projectId"
132+
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
133+
Then Enter input plugin property: "dataset" with value: "dataset"
134+
Then Enter input plugin property: "table" with value: "bqSourceTable"
135+
Then Click on the Get Schema button
136+
Then Verify the Output Schema matches the Expected Schema: "bqOutputDatatypesSchemaSmallCase"
137+
Then Validate "BigQuery" plugin properties
138+
Then Close the Plugin Properties page
139+
Then Navigate to the properties page of plugin: "Oracle"
140+
Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
141+
Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields
142+
Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields
143+
Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
144+
Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
145+
Then Select radio button plugin property: "connectionType" with value: "service"
146+
Then Select radio button plugin property: "role" with value: "normal"
147+
Then Enter input plugin property: "referenceName" with value: "sourceRef"
148+
Then Replace input plugin property: "database" with value: "databaseName"
149+
Then Replace input plugin property: "tableName" with value: "targetTable"
150+
Then Replace input plugin property: "dbSchemaName" with value: "schema"
151+
Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
152+
Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
153+
Then Enter input plugin property: "referenceName" with value: "targetRef"
154+
Then Select radio button plugin property: "connectionType" with value: "service"
155+
Then Select radio button plugin property: "role" with value: "normal"
156+
Then Validate "Oracle" plugin properties
157+
Then Close the Plugin Properties page
158+
Then Save the pipeline
159+
Then Preview and run the pipeline
160+
Then Verify the preview of pipeline is "success"
161+
Then Click on preview data for Oracle sink
162+
Then Close the preview data
163+
Then Deploy the pipeline
164+
Then Run the Pipeline in Runtime
165+
Then Wait till pipeline is in running state
166+
Then Open and capture logs
167+
Then Verify the pipeline status is "Succeeded"
168+
Then Validate records transferred to target table with record counts of BigQuery table
169+
Then Validate the values of records transferred to target Oracle table is equal to the values from source BigQuery table with case

oracle-plugin/src/e2e-test/java/io.cdap.plugin/BQValidation.java

+8-5
Original file line numberDiff line numberDiff line change
@@ -68,11 +68,12 @@ public static boolean validateDBToBQRecordValues(String schema, String sourceTab
6868
ResultSet.HOLD_CURSORS_OVER_COMMIT);
6969

7070
ResultSet rsSource = statement1.executeQuery(getSourceQuery);
71-
return compareResultSetAndJsonData(rsSource, jsonResponse);
71+
return compareResultSetAndJsonData(rsSource, jsonResponse, false);
7272
}
7373
}
7474

75-
public static boolean validateBQToDBRecordValues(String schema, String sourceTable, String targetTable)
75+
public static boolean validateBQToDBRecordValues(String schema, String sourceTable, String targetTable,
76+
boolean isSchemaSmallCase)
7677
throws SQLException, ClassNotFoundException, ParseException, IOException, InterruptedException {
7778
List<JsonObject> jsonResponse = new ArrayList<>();
7879
List<Object> bigQueryRows = new ArrayList<>();
@@ -88,7 +89,7 @@ public static boolean validateBQToDBRecordValues(String schema, String sourceTab
8889
ResultSet.HOLD_CURSORS_OVER_COMMIT);
8990

9091
ResultSet rsTarget = statement1.executeQuery(getTargetQuery);
91-
return compareResultSetAndJsonData(rsTarget, jsonResponse);
92+
return compareResultSetAndJsonData(rsTarget, jsonResponse, isSchemaSmallCase);
9293
}
9394
}
9495

@@ -119,7 +120,8 @@ private static void getBigQueryTableData(String table, List<Object> bigQueryRows
119120
* @throws ParseException If an error occurs while parsing the data.
120121
*/
121122

122-
public static boolean compareResultSetAndJsonData(ResultSet rsSource, List<JsonObject> bigQueryData)
123+
public static boolean compareResultSetAndJsonData(ResultSet rsSource, List<JsonObject> bigQueryData,
124+
boolean isSchemaSmallCase)
123125
throws SQLException, ParseException {
124126
ResultSetMetaData mdSource = rsSource.getMetaData();
125127
boolean result = false;
@@ -146,7 +148,8 @@ public static boolean compareResultSetAndJsonData(ResultSet rsSource, List<JsonO
146148
while (currentColumnCount <= columnCountSource) {
147149
String columnTypeName = mdSource.getColumnTypeName(currentColumnCount);
148150
int columnType = mdSource.getColumnType(currentColumnCount);
149-
String columnName = mdSource.getColumnName(currentColumnCount);
151+
String columnName = isSchemaSmallCase ? mdSource.getColumnName(currentColumnCount).toLowerCase() :
152+
mdSource.getColumnName(currentColumnCount);
150153
// Perform different comparisons based on column type
151154
switch (columnType) {
152155
// Since we skip BFILE in Oracle Sink, we are not comparing the BFILE source and sink values

oracle-plugin/src/e2e-test/java/io.cdap.plugin/common.stepsdesign/TestSetupHooks.java

+14
Original file line numberDiff line numberDiff line change
@@ -372,4 +372,18 @@ private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile
372372
PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable);
373373
BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully");
374374
}
375+
376+
@Before(order = 1, value = "@BQ_SOURCE_TEST_SMALL_CASE")
377+
public static void createTempSourceBQTableSmallCase() throws IOException, InterruptedException {
378+
createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("CreateBQTableQueryFileSmallCase"),
379+
PluginPropertyUtils.pluginProp("InsertBQDataQueryFileSmallCase"));
380+
}
381+
382+
@After(order = 1, value = "@BQ_SOURCE_TEST_SMALL_CASE")
383+
public static void deleteTempSourceBQTableSmallCase() throws IOException, InterruptedException {
384+
String bqSourceTable = PluginPropertyUtils.pluginProp("bqSourceTable");
385+
BigQueryClient.dropBqQuery(bqSourceTable);
386+
BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully");
387+
PluginPropertyUtils.removePluginProp("bqSourceTable");
388+
}
375389
}

oracle-plugin/src/e2e-test/java/io.cdap.plugin/oracle/stepsdesign/Oracle.java

+20-1
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,26 @@ public void validateTheValuesOfRecordsTransferredToTargetOracleTableIsEqualToThe
9494

9595
boolean recordsMatched = BQValidation.validateBQToDBRecordValues(PluginPropertyUtils.pluginProp("schema"),
9696
PluginPropertyUtils.pluginProp("bqSourceTable"),
97-
PluginPropertyUtils.pluginProp("targetTable"));
97+
PluginPropertyUtils.pluginProp("targetTable"),
98+
false);
99+
Assert.assertTrue("Value of records transferred to the target table should be equal to the value " +
100+
"of the records in the source table", recordsMatched);
101+
}
102+
103+
@Then("Validate the values of records transferred to target Oracle table is equal to the values from source " +
104+
"BigQuery table with case")
105+
public void
106+
validateTheValuesOfRecordsTransferredToTargetOracleTableIsEqualToTheValuesFromSourceBigQueryTableWithCase()
107+
throws IOException, InterruptedException, SQLException, ClassNotFoundException, ParseException {
108+
int sourceBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqSourceTable"));
109+
BeforeActions.scenario.write("No of Records from source BigQuery table:" + sourceBQRecordsCount);
110+
Assert.assertEquals("Out records should match with target Oracle table records count",
111+
CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), sourceBQRecordsCount);
112+
113+
boolean recordsMatched = BQValidation.validateBQToDBRecordValues(PluginPropertyUtils.pluginProp("schema"),
114+
PluginPropertyUtils.pluginProp("bqSourceTable"),
115+
PluginPropertyUtils.pluginProp("targetTable"),
116+
true);
98117
Assert.assertTrue("Value of records transferred to the target table should be equal to the value " +
99118
"of the records in the source table", recordsMatched);
100119
}

oracle-plugin/src/e2e-test/resources/pluginParameters.properties

+4
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,7 @@ importQuery=where $CONDITIONS
9898
projectId=cdf-athena
9999
dataset=test_automation
100100
bqOutputDatatypesSchema=[{"key":"ID","value":"decimal"},{"key":"LASTNAME","value":"string"}]
101+
bqOutputDatatypesSchemaSmallCase=[{"key":"id","value":"decimal"},{"key":"lastname","value":"string"}]
101102
jdbcUrl=jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;ProjectId=%s;OAuthType=3;
102103

103104
#bq macro properties
@@ -107,6 +108,9 @@ bqUpdateTableSchema=true
107108
#bq queries file path
108109
CreateBQTableQueryFile=testdata/BigQuery/BigQueryCreateTableQuery.txt
109110
InsertBQDataQueryFile=testdata/BigQuery/BigQueryInsertDataQuery.txt
111+
#bq queries file path for Small Case Schema
112+
CreateBQTableQueryFileSmallCase=testdata/BigQuery/BigQueryCreateTableQuerySmallCase.txt
113+
InsertBQDataQueryFileSmallCase=testdata/BigQuery/BigQueryInsertDataQuerySmallCase.txt
110114

111115
#ORACLE Datatypes
112116
bigQueryColumns=(COL23 FLOAT(4), COL28 TIMESTAMP, COL29 TIMESTAMP(9), COL30 TIMESTAMP WITH TIME ZONE, \
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
create table `DATASET.TABLE_NAME` (id NUMERIC, lastname STRING)
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
insert into `DATASET.TABLE_NAME` (id, lastname) values
2+
(1,'Shelby'),
3+
(2,'Simpson'),
4+
(3,'Williams'),
5+
(4,'Sherry'),
6+
(5,'James');

0 commit comments

Comments
 (0)