Skip to content

Commit 5c338cb

Browse files
authored
Fix for Map columns with Enums (#1236)
1 parent b540efa commit 5c338cb

File tree

2 files changed: +51 additions, −2 deletions

lib/column/map.go

+11 additions, −1 deletion
@@ -66,7 +66,17 @@ func (col *Map) Name() string {
6666

6767
func (col *Map) parse(t Type, tz *time.Location) (_ Interface, err error) {
6868
col.chType = t
69-
if types := strings.SplitN(t.params(), ",", 2); len(types) == 2 {
69+
types := make([]string, 2, 2)
70+
typeParams := t.params()
71+
idx := strings.Index(typeParams, ",")
72+
if strings.HasPrefix(typeParams, "Enum") {
73+
idx = strings.Index(typeParams, "),") + 1
74+
}
75+
if idx > 0 {
76+
types[0] = typeParams[:idx]
77+
types[1] = typeParams[idx+1:]
78+
}
79+
if types[0] != "" && types[1] != "" {
7080
if col.keys, err = Type(strings.TrimSpace(types[0])).Column(col.name, tz); err != nil {
7181
return nil, err
7282
}

tests/map_test.go

+40 additions, −1 deletion
@@ -122,6 +122,9 @@ func TestColumnarMap(t *testing.T) {
122122
Col1 Map(String, UInt64)
123123
, Col2 Map(String, UInt64)
124124
, Col3 Map(String, UInt64)
125+
, Col4 Map(Enum16('one' = 1, 'two' = 2), UInt64)
126+
, Col5 Map(String, Enum16('one' = 1, 'two' = 2))
127+
, Col6 Map(Enum8('one' = 1, 'two' = 2), Enum8('red' = 1, 'blue' = 2))
125128
) Engine MergeTree() ORDER BY tuple()
126129
`
127130
defer func() {
@@ -134,6 +137,9 @@ func TestColumnarMap(t *testing.T) {
134137
col1Data = []map[string]uint64{}
135138
col2Data = []map[string]uint64{}
136139
col3Data = []map[string]uint64{}
140+
col4Data = []map[string]uint64{}
141+
col5Data = []map[string]string{}
142+
col6Data = []map[string]string{}
137143
)
138144
for i := 0; i < 100; i++ {
139145
col1Data = append(col1Data, map[string]uint64{
@@ -145,17 +151,35 @@ func TestColumnarMap(t *testing.T) {
145151
fmt.Sprintf("key_col_2_%d_2", i): uint64(i),
146152
})
147153
col3Data = append(col3Data, map[string]uint64{})
154+
col4Data = append(col4Data, map[string]uint64{
155+
"one": uint64(i),
156+
"two": uint64(i),
157+
})
158+
col5Data = append(col5Data, map[string]string{
159+
fmt.Sprintf("key_col_2_%d_1", i): "one",
160+
fmt.Sprintf("key_col_2_%d_2", i): "two",
161+
})
162+
col6Data = append(col6Data, map[string]string{
163+
"one": "red",
164+
"two": "blue",
165+
})
148166
}
149167
require.NoError(t, batch.Column(0).Append(col1Data))
150168
require.NoError(t, batch.Column(1).Append(col2Data))
151169
require.NoError(t, batch.Column(2).Append(col3Data))
170+
require.NoError(t, batch.Column(3).Append(col4Data))
171+
require.NoError(t, batch.Column(4).Append(col5Data))
172+
require.NoError(t, batch.Column(5).Append(col6Data))
152173
require.Equal(t, 100, batch.Rows())
153174
require.NoError(t, batch.Send())
154175
{
155176
var (
156177
col1 map[string]uint64
157178
col2 map[string]uint64
158179
col3 map[string]uint64
180+
col4 map[string]uint64
181+
col5 map[string]string
182+
col6 map[string]string
159183
col1Data = map[string]uint64{
160184
"key_col_1_10_1": 10,
161185
"key_col_1_10_2": 10,
@@ -165,11 +189,26 @@ func TestColumnarMap(t *testing.T) {
165189
"key_col_2_10_2": 10,
166190
}
167191
col3Data = map[string]uint64{}
192+
col4Data = map[string]uint64{
193+
"one": 10,
194+
"two": 10,
195+
}
196+
col5Data = map[string]string{
197+
"key_col_2_10_1": "one",
198+
"key_col_2_10_2": "two",
199+
}
200+
col6Data = map[string]string{
201+
"one": "red",
202+
"two": "blue",
203+
}
168204
)
169-
require.NoError(t, conn.QueryRow(ctx, "SELECT * FROM test_map WHERE Col1['key_col_1_10_1'] = $1", 10).Scan(&col1, &col2, &col3))
205+
require.NoError(t, conn.QueryRow(ctx, "SELECT * FROM test_map WHERE Col1['key_col_1_10_1'] = $1", 10).Scan(&col1, &col2, &col3, &col4, &col5, &col6))
170206
assert.Equal(t, col1Data, col1)
171207
assert.Equal(t, col2Data, col2)
172208
assert.Equal(t, col3Data, col3)
209+
assert.Equal(t, col4Data, col4)
210+
assert.Equal(t, col5Data, col5)
211+
assert.Equal(t, col6Data, col6)
173212
}
174213
}
175214

Comments: 0