 
 DEFAULT_SORT_ORDER = SortOrder(SortField(source_id=2, transform=IdentityTransform()))
 
+AVAILABLE_ENGINES = ["DataLakeCatalog", "Iceberg"]
+
 
 def list_namespaces():
     response = requests.get(f"{BASE_URL_LOCAL}/namespaces")
@@ -120,7 +122,7 @@ def generate_record():
 
 
 def create_clickhouse_iceberg_database(
-    started_cluster, node, name, additional_settings={}
+    started_cluster, node, name, additional_settings={}, engine='DataLakeCatalog'
 ):
     settings = {
         "catalog_type": "rest",
@@ -134,7 +136,7 @@ def create_clickhouse_iceberg_database(
         f"""
 DROP DATABASE IF EXISTS {name};
 SET allow_experimental_database_iceberg=true;
-CREATE DATABASE {name} ENGINE = DataLakeCatalog('{BASE_URL}', 'minio', '{minio_secret_key}')
+CREATE DATABASE {name} ENGINE = {engine}('{BASE_URL}', 'minio', '{minio_secret_key}')
 SETTINGS {",".join((k + "=" + repr(v) for k, v in settings.items()))}
     """
     )
@@ -180,7 +182,8 @@ def started_cluster():
         cluster.shutdown()
 
 
-def test_list_tables(started_cluster):
+@pytest.mark.parametrize("engine", AVAILABLE_ENGINES)
+def test_list_tables(started_cluster, engine):
     node = started_cluster.instances["node1"]
 
     root_namespace = f"clickhouse_{uuid.uuid4()}"
@@ -211,7 +214,7 @@ def test_list_tables(started_cluster):
     for namespace in [namespace_1, namespace_2]:
         assert len(catalog.list_tables(namespace)) == 0
 
-    create_clickhouse_iceberg_database(started_cluster, node, CATALOG_NAME)
+    create_clickhouse_iceberg_database(started_cluster, node, CATALOG_NAME, engine=engine)
 
     tables_list = ""
     for table in namespace_1_tables:
@@ -246,7 +249,8 @@ def test_list_tables(started_cluster):
     )
 
 
-def test_many_namespaces(started_cluster):
+@pytest.mark.parametrize("engine", AVAILABLE_ENGINES)
+def test_many_namespaces(started_cluster, engine):
     node = started_cluster.instances["node1"]
     root_namespace_1 = f"A_{uuid.uuid4()}"
     root_namespace_2 = f"B_{uuid.uuid4()}"
@@ -267,7 +271,7 @@ def test_many_namespaces(started_cluster):
         for table in tables:
             create_table(catalog, namespace, table)
 
-    create_clickhouse_iceberg_database(started_cluster, node, CATALOG_NAME)
+    create_clickhouse_iceberg_database(started_cluster, node, CATALOG_NAME, engine=engine)
 
     for namespace in namespaces:
         for table in tables:
@@ -279,7 +283,8 @@ def test_many_namespaces(started_cluster):
     )
 
 
-def test_select(started_cluster):
+@pytest.mark.parametrize("engine", AVAILABLE_ENGINES)
+def test_select(started_cluster, engine):
     node = started_cluster.instances["node1"]
 
     test_ref = f"test_list_tables_{uuid.uuid4()}"
@@ -307,7 +312,7 @@ def test_select(started_cluster):
     df = pa.Table.from_pylist(data)
     table.append(df)
 
-    create_clickhouse_iceberg_database(started_cluster, node, CATALOG_NAME)
+    create_clickhouse_iceberg_database(started_cluster, node, CATALOG_NAME, engine=engine)
 
     expected = DEFAULT_CREATE_TABLE.format(CATALOG_NAME, namespace, table_name)
     assert expected == node.query(
@@ -319,7 +324,8 @@ def test_select(started_cluster):
     )
 
 
-def test_hide_sensitive_info(started_cluster):
+@pytest.mark.parametrize("engine", AVAILABLE_ENGINES)
+def test_hide_sensitive_info(started_cluster, engine):
     node = started_cluster.instances["node1"]
 
     test_ref = f"test_hide_sensitive_info_{uuid.uuid4()}"
@@ -337,6 +343,7 @@ def test_hide_sensitive_info(started_cluster):
         node,
         CATALOG_NAME,
         additional_settings={"catalog_credential": "SECRET_1"},
+        engine=engine,
     )
     assert "SECRET_1" not in node.query(f"SHOW CREATE DATABASE {CATALOG_NAME}")
 
@@ -345,11 +352,13 @@ def test_hide_sensitive_info(started_cluster):
         node,
         CATALOG_NAME,
         additional_settings={"auth_header": "SECRET_2"},
+        engine=engine,
     )
     assert "SECRET_2" not in node.query(f"SHOW CREATE DATABASE {CATALOG_NAME}")
 
 
-def test_tables_with_same_location(started_cluster):
+@pytest.mark.parametrize("engine", AVAILABLE_ENGINES)
+def test_tables_with_same_location(started_cluster, engine):
     node = started_cluster.instances["node1"]
 
     test_ref = f"test_tables_with_same_location_{uuid.uuid4()}"
@@ -380,7 +389,7 @@ def record(key):
     df = pa.Table.from_pylist(data)
     table_2.append(df)
 
-    create_clickhouse_iceberg_database(started_cluster, node, CATALOG_NAME)
+    create_clickhouse_iceberg_database(started_cluster, node, CATALOG_NAME, engine=engine)
 
     assert 'aaa\naaa\naaa' == node.query(f"SELECT symbol FROM {CATALOG_NAME}.`{namespace}.{table_name}`").strip()
     assert 'bbb\nbbb\nbbb' == node.query(f"SELECT symbol FROM {CATALOG_NAME}.`{namespace}.{table_name_2}`").strip()
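Note: the @pytest.mark.parametrize("engine", AVAILABLE_ENGINES) decorator added above makes pytest collect each test once per engine name, so every case runs against both DataLakeCatalog and Iceberg. A minimal standalone sketch of that expansion, assuming plain pytest collection (the real tests additionally need the started_cluster fixture and the ClickHouse integration-test runner):

# sketch only, not part of the patch
import pytest

AVAILABLE_ENGINES = ["DataLakeCatalog", "Iceberg"]


@pytest.mark.parametrize("engine", AVAILABLE_ENGINES)
def test_engine_names(engine):
    # pytest generates one item per value, e.g. test_engine_names[DataLakeCatalog]
    # and test_engine_names[Iceberg]; a single case can be selected with
    #   pytest -k "test_engine_names and Iceberg"
    assert engine in AVAILABLE_ENGINES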