Skip to content

Commit e075690

Browse files
Shubhangi-cs authored and vikasrathee-cs committed
Redshift Junits
Redshift Junits
1 parent 6bc17b9 commit e075690

File tree

6 files changed

+257
-7
lines changed

6 files changed

+257
-7
lines changed

amazon-redshift-plugin/src/main/java/io/cdap/plugin/amazon/redshift/RedshiftConnectorConfig.java

+6-6
Original file line numberDiff line numberDiff line change
@@ -72,16 +72,16 @@ public int getPort() {
7272

7373
@Override
7474
public String getConnectionString() {
75-
return String.format(
76-
RedshiftConstants.REDSHIFT_CONNECTION_STRING_FORMAT,
77-
host,
78-
getPort(),
79-
database);
75+
return String.format(
76+
RedshiftConstants.REDSHIFT_CONNECTION_STRING_FORMAT,
77+
host,
78+
getPort(),
79+
database);
8080
}
8181

8282
@Override
8383
public boolean canConnect() {
8484
return super.canConnect() && !containsMacro(ConnectionConfig.HOST) &&
85-
!containsMacro(ConnectionConfig.PORT) && !containsMacro(ConnectionConfig.DATABASE);
85+
!containsMacro(ConnectionConfig.PORT) && !containsMacro(ConnectionConfig.DATABASE);
8686
}
8787
}

amazon-redshift-plugin/src/main/java/io/cdap/plugin/amazon/redshift/RedshiftSource.java

+8
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616

1717
package io.cdap.plugin.amazon.redshift;
1818

19+
import com.google.common.annotations.VisibleForTesting;
1920
import io.cdap.cdap.api.annotation.Description;
2021
import io.cdap.cdap.api.annotation.Macro;
2122
import io.cdap.cdap.api.annotation.Metadata;
@@ -108,6 +109,13 @@ public Map<String, String> getDBSpecificArguments() {
108109
return Collections.emptyMap();
109110
}
110111

112+
@VisibleForTesting
113+
public RedshiftSourceConfig(@Nullable Boolean useConnection,
114+
@Nullable RedshiftConnectorConfig connection) {
115+
this.useConnection = useConnection;
116+
this.connection = connection;
117+
}
118+
111119
@Override
112120
public Integer getFetchSize() {
113121
Integer fetchSize = super.getFetchSize();

amazon-redshift-plugin/src/test/java/io/cdap/plugin/amazon/redshift/RedshiftConnectorUnitTest.java

+13
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,19 @@ public void getTableNameTest() {
3939
CONNECTOR.getTableName("db", "schema", "table"));
4040
}
4141

42+
@Test
43+
public void getRandomQuery() {
44+
Assert.assertEquals("SELECT * FROM TestData\n" +
45+
"TABLESAMPLE BERNOULLI (100.0 * 10 / (SELECT COUNT(*) FROM TestData))",
46+
CONNECTOR.getRandomQuery("TestData", 10));
47+
}
48+
49+
@Test
50+
public void getDBRecordType() {
51+
Assert.assertEquals("class io.cdap.plugin.amazon.redshift.RedshiftDBRecord",
52+
CONNECTOR.getDBRecordType().toString());
53+
}
54+
4255
/**
4356
* Unit tests for getTableQuery()
4457
*/
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,131 @@
1+
/*
2+
* Copyright © 2023 Cask Data, Inc.
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
5+
* use this file except in compliance with the License. You may obtain a copy of
6+
* the License at
7+
*
8+
* http://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12+
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13+
* License for the specific language governing permissions and limitations under
14+
* the License.
15+
*/
16+
17+
package io.cdap.plugin.amazon.redshift;
18+
19+
import com.google.common.collect.Lists;
20+
import io.cdap.cdap.api.data.schema.Schema;
21+
import org.junit.Assert;
22+
import org.junit.Test;
23+
import org.junit.runner.RunWith;
24+
import org.mockito.Mockito;
25+
import org.mockito.junit.MockitoJUnitRunner;
26+
27+
import java.sql.ResultSet;
28+
import java.sql.ResultSetMetaData;
29+
import java.sql.SQLException;
30+
import java.sql.Types;
31+
import java.util.List;
32+
33+
@RunWith(MockitoJUnitRunner.class)
34+
public class RedshiftSchemaReaderTest {
35+
36+
@Test
37+
public void testGetSchema() throws SQLException {
38+
RedshiftSchemaReader schemaReader = new RedshiftSchemaReader();
39+
40+
ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);
41+
Mockito.when(metadata.getColumnTypeName(1)).thenReturn("timetz");
42+
Mockito.when(metadata.getColumnType(1)).thenReturn(Types.TIMESTAMP);
43+
44+
Schema schema = schemaReader.getSchema(metadata, 1);
45+
46+
Assert.assertEquals(Schema.of(Schema.Type.STRING), schema);
47+
}
48+
49+
@Test
50+
public void testGetSchemaWithIntType() throws SQLException {
51+
RedshiftSchemaReader schemaReader = new RedshiftSchemaReader();
52+
ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);
53+
Mockito.when(metadata.getColumnTypeName(1)).thenReturn("INT");
54+
Mockito.when(metadata.getColumnType(1)).thenReturn(Types.NUMERIC);
55+
Schema schema = schemaReader.getSchema(metadata, 1);
56+
57+
Assert.assertEquals(Schema.of(Schema.Type.INT), schema);
58+
}
59+
60+
@Test
61+
public void testGetSchemaWithNumericTypeWithPrecision() throws SQLException {
62+
RedshiftSchemaReader schemaReader = new RedshiftSchemaReader();
63+
ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);
64+
Mockito.when(metadata.getColumnTypeName(1)).thenReturn("STRING");
65+
Mockito.when(metadata.getColumnType(1)).thenReturn(Types.NUMERIC);
66+
Mockito.when(metadata.getPrecision(1)).thenReturn(0);
67+
68+
Schema schema = schemaReader.getSchema(metadata, 1);
69+
70+
Assert.assertEquals(Schema.of(Schema.Type.STRING), schema);
71+
}
72+
73+
@Test
74+
public void testGetSchemaWithOtherTypes() throws SQLException {
75+
RedshiftSchemaReader schemaReader = new RedshiftSchemaReader();
76+
ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);
77+
Mockito.when(metadata.getColumnTypeName(1)).thenReturn("BIGINT");
78+
Mockito.when(metadata.getColumnType(1)).thenReturn(Types.BIGINT);
79+
Schema schema = schemaReader.getSchema(metadata, 1);
80+
81+
Assert.assertEquals(Schema.of(Schema.Type.LONG), schema);
82+
83+
Mockito.when(metadata.getColumnTypeName(2)).thenReturn("timestamp");
84+
Mockito.when(metadata.getColumnType(2)).thenReturn(Types.TIMESTAMP);
85+
86+
schema = schemaReader.getSchema(metadata, 2);
87+
88+
Assert.assertEquals(Schema.of(Schema.LogicalType.DATETIME), schema);
89+
}
90+
91+
@Test
92+
public void testShouldIgnoreColumn() throws SQLException {
93+
RedshiftSchemaReader schemaReader = new RedshiftSchemaReader("sessionID");
94+
ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);
95+
Mockito.when(metadata.getColumnName(1)).thenReturn("c_sessionID");
96+
Assert.assertTrue(schemaReader.shouldIgnoreColumn(metadata, 1));
97+
Mockito.when(metadata.getColumnName(2)).thenReturn("sqn_sessionID");
98+
Assert.assertTrue(schemaReader.shouldIgnoreColumn(metadata, 2));
99+
Mockito.when(metadata.getColumnName(3)).thenReturn("columnName");
100+
Assert.assertFalse(schemaReader.shouldIgnoreColumn(metadata, 3));
101+
}
102+
103+
@Test
104+
public void testGetSchemaFields() throws SQLException {
105+
RedshiftSchemaReader schemaReader = new RedshiftSchemaReader();
106+
107+
ResultSet resultSet = Mockito.mock(ResultSet.class);
108+
ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);
109+
110+
Mockito.when(resultSet.getMetaData()).thenReturn(metadata);
111+
112+
// Mock two columns with different types
113+
Mockito.when(metadata.getColumnCount()).thenReturn(2);
114+
Mockito.when(metadata.getColumnTypeName(1)).thenReturn("INT");
115+
Mockito.when(metadata.getColumnType(1)).thenReturn(Types.NUMERIC);
116+
Mockito.when(metadata.getColumnName(1)).thenReturn("column1");
117+
118+
Mockito.when(metadata.getColumnTypeName(2)).thenReturn("BIGINT");
119+
Mockito.when(metadata.getColumnType(2)).thenReturn(Types.BIGINT);
120+
Mockito.when(metadata.getColumnName(2)).thenReturn("column2");
121+
122+
List<Schema.Field> expectedSchemaFields = Lists.newArrayList();
123+
expectedSchemaFields.add(Schema.Field.of("column1", Schema.nullableOf(Schema.of(Schema.Type.INT))));
124+
expectedSchemaFields.add(Schema.Field.of("column2", Schema.nullableOf(Schema.of(Schema.Type.LONG))));
125+
126+
List<Schema.Field> actualSchemaFields = schemaReader.getSchemaFields(resultSet);
127+
128+
Assert.assertEquals(expectedSchemaFields.get(0).getName(), actualSchemaFields.get(0).getName());
129+
Assert.assertEquals(expectedSchemaFields.get(1).getName(), actualSchemaFields.get(1).getName());
130+
}
131+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,98 @@
1+
/*
2+
* Copyright © 2023 Cask Data, Inc.
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
5+
* use this file except in compliance with the License. You may obtain a copy of
6+
* the License at
7+
*
8+
* http://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12+
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13+
* License for the specific language governing permissions and limitations under
14+
* the License.
15+
*/
16+
17+
package io.cdap.plugin.amazon.redshift;
18+
19+
import io.cdap.cdap.etl.api.batch.BatchSourceContext;
20+
import io.cdap.plugin.common.LineageRecorder;
21+
import io.cdap.plugin.db.SchemaReader;
22+
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
23+
import org.junit.Assert;
24+
import org.junit.Test;
25+
import org.junit.runner.RunWith;
26+
import org.mockito.Mockito;
27+
import org.mockito.junit.MockitoJUnitRunner;
28+
29+
import java.util.Map;
30+
31+
@RunWith(MockitoJUnitRunner.class)
32+
public class RedshiftSourceTest {
33+
34+
@Test
35+
public void testGetDBSpecificArguments() {
36+
RedshiftConnectorConfig connectorConfig = new RedshiftConnectorConfig("username", "password",
37+
"jdbcPluginName", "connectionArguments",
38+
"host", "database", 1101);
39+
RedshiftSource.RedshiftSourceConfig config = new RedshiftSource.RedshiftSourceConfig(false, connectorConfig);
40+
Map<String, String> dbSpecificArguments = config.getDBSpecificArguments();
41+
Assert.assertEquals(0, dbSpecificArguments.size());
42+
}
43+
44+
@Test
45+
public void testGetFetchSize() {
46+
RedshiftConnectorConfig connectorConfig = new RedshiftConnectorConfig("username", "password",
47+
"jdbcPluginName", "connectionArguments",
48+
"host", "database", 1101);
49+
RedshiftSource.RedshiftSourceConfig config = new RedshiftSource.RedshiftSourceConfig(false, connectorConfig);
50+
Integer fetchSize = config.getFetchSize();
51+
Assert.assertEquals(1000, fetchSize.intValue());
52+
}
53+
54+
@Test
55+
public void testGetSchemaReader() {
56+
RedshiftConnectorConfig connectorConfig = new RedshiftConnectorConfig("username", "password",
57+
"jdbcPluginName", "connectionArguments",
58+
"host", "database", 1101);
59+
RedshiftSource source = new RedshiftSource(new RedshiftSource.RedshiftSourceConfig(false, connectorConfig));
60+
SchemaReader schemaReader = source.getSchemaReader();
61+
Assert.assertTrue(schemaReader instanceof RedshiftSchemaReader);
62+
}
63+
64+
@Test
65+
public void testGetDBRecordType() {
66+
RedshiftConnectorConfig connectorConfig = new RedshiftConnectorConfig("username", "password",
67+
"jdbcPluginName", "connectionArguments",
68+
"host", "database", 1101);
69+
RedshiftSource source = new RedshiftSource(new RedshiftSource.RedshiftSourceConfig(false, connectorConfig));
70+
Class<? extends DBWritable> dbRecordType = source.getDBRecordType();
71+
Assert.assertEquals(RedshiftDBRecord.class, dbRecordType);
72+
}
73+
74+
@Test
75+
public void testCreateConnectionString() {
76+
RedshiftConnectorConfig connectorConfig = new RedshiftConnectorConfig("username", "password",
77+
"jdbcPluginName", "connectionArguments",
78+
"localhost", "test", 5439);
79+
RedshiftSource.RedshiftSourceConfig config = new RedshiftSource.RedshiftSourceConfig(false, connectorConfig);
80+
81+
RedshiftSource source = new RedshiftSource(config);
82+
String connectionString = source.createConnectionString();
83+
Assert.assertEquals("jdbc:redshift://localhost:5439/test", connectionString);
84+
}
85+
86+
@Test
87+
public void testGetLineageRecorder() {
88+
BatchSourceContext context = Mockito.mock(BatchSourceContext.class);
89+
RedshiftConnectorConfig connectorConfig = new RedshiftConnectorConfig("username", "password",
90+
"jdbcPluginName", "connectionArguments",
91+
"host", "database", 1101);
92+
RedshiftSource.RedshiftSourceConfig config = new RedshiftSource.RedshiftSourceConfig(false, connectorConfig);
93+
RedshiftSource source = new RedshiftSource(config);
94+
95+
LineageRecorder lineageRecorder = source.getLineageRecorder(context);
96+
Assert.assertNotNull(lineageRecorder);
97+
}
98+
}

amazon-redshift-plugin/widgets/Redshift-batchsource.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,7 @@
135135
},
136136
{
137137
"widget-type": "textbox",
138-
"label": "Split Column",
138+
"label": "Split-By Field Name",
139139
"name": "splitBy"
140140
},
141141
{

0 commit comments

Comments (0)