Skip to content

Commit ef8a945

Browse files
authored
Merge pull request #825 from Altinity/customizations/24.3.18
2 parents 3f49b6a + 8c10ebe commit ef8a945

File tree

12 files changed

+413
-108
lines changed

12 files changed

+413
-108
lines changed

.github/create_workflow_report.py

Lines changed: 64 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -253,11 +253,17 @@ def get_commit_statuses(sha: str) -> pd.DataFrame:
253253
for item in all_data
254254
]
255255

256-
return (
257-
pd.DataFrame(parsed)
258-
.sort_values(by=["job_status", "job_name"], ascending=[True, True])
259-
.reset_index(drop=True)
260-
)
256+
# Create DataFrame
257+
df = pd.DataFrame(parsed)
258+
259+
# Drop duplicates keeping the first occurrence (newest status for each context)
260+
# GitHub returns statuses in reverse chronological order
261+
df = df.drop_duplicates(subset=["job_name"], keep="first")
262+
263+
# Sort by status and job name
264+
return df.sort_values(
265+
by=["job_status", "job_name"], ascending=[True, True]
266+
).reset_index(drop=True)
261267

262268

263269
def get_pr_info_from_number(pr_number: str) -> dict:
@@ -291,28 +297,50 @@ def get_checks_fails(client: Client, job_url: str):
291297
Get tests that did not succeed for the given job URL.
292298
Exclude checks that have status 'error' as they are counted in get_checks_errors.
293299
"""
294-
columns = "check_status as job_status, check_name as job_name, test_status, test_name, report_url as results_link"
295-
query = f"""SELECT {columns} FROM `gh-data`.checks
296-
WHERE task_url LIKE '{job_url}%'
297-
AND test_status IN ('FAIL', 'ERROR')
298-
AND check_status!='error'
299-
ORDER BY check_name, test_name
300-
"""
300+
query = f"""SELECT job_status, job_name, status as test_status, test_name, results_link
301+
FROM (
302+
SELECT
303+
argMax(check_status, check_start_time) as job_status,
304+
check_name as job_name,
305+
argMax(test_status, check_start_time) as status,
306+
test_name,
307+
report_url as results_link,
308+
task_url
309+
FROM `gh-data`.checks
310+
GROUP BY check_name, test_name, report_url, task_url
311+
)
312+
WHERE task_url LIKE '{job_url}%'
313+
AND test_status IN ('FAIL', 'ERROR')
314+
AND job_status!='error'
315+
ORDER BY job_name, test_name
316+
"""
301317
return client.query_dataframe(query)
302318

303319

304320
def get_checks_known_fails(client: Client, job_url: str, known_fails: dict):
305321
"""
306322
Get tests that are known to fail for the given job URL.
307323
"""
308-
assert len(known_fails) > 0, "cannot query the database with empty known fails"
309-
columns = "check_status as job_status, check_name as job_name, test_status, test_name, report_url as results_link"
310-
query = f"""SELECT {columns} FROM `gh-data`.checks
311-
WHERE task_url LIKE '{job_url}%'
312-
AND test_status='BROKEN'
313-
AND test_name IN ({','.join(f"'{test}'" for test in known_fails.keys())})
314-
ORDER BY test_name, check_name
315-
"""
324+
if len(known_fails) == 0:
325+
return pd.DataFrame()
326+
327+
query = f"""SELECT job_status, job_name, status as test_status, test_name, results_link
328+
FROM (
329+
SELECT
330+
argMax(check_status, check_start_time) as job_status,
331+
check_name as job_name,
332+
argMax(test_status, check_start_time) as status,
333+
test_name,
334+
report_url as results_link,
335+
task_url
336+
FROM `gh-data`.checks
337+
GROUP BY check_name, test_name, report_url, task_url
338+
)
339+
WHERE task_url LIKE '{job_url}%'
340+
AND test_status='BROKEN'
341+
AND test_name IN ({','.join(f"'{test}'" for test in known_fails.keys())})
342+
ORDER BY job_name, test_name
343+
"""
316344

317345
df = client.query_dataframe(query)
318346

@@ -333,12 +361,22 @@ def get_checks_errors(client: Client, job_url: str):
333361
"""
334362
Get checks that have status 'error' for the given job URL.
335363
"""
336-
columns = "check_status as job_status, check_name as job_name, test_status, test_name, report_url as results_link"
337-
query = f"""SELECT {columns} FROM `gh-data`.checks
338-
WHERE task_url LIKE '{job_url}%'
339-
AND check_status=='error'
340-
ORDER BY check_name, test_name
341-
"""
364+
query = f"""SELECT job_status, job_name, status as test_status, test_name, results_link
365+
FROM (
366+
SELECT
367+
argMax(check_status, check_start_time) as job_status,
368+
check_name as job_name,
369+
argMax(test_status, check_start_time) as status,
370+
test_name,
371+
report_url as results_link,
372+
task_url
373+
FROM `gh-data`.checks
374+
GROUP BY check_name, test_name, report_url, task_url
375+
)
376+
WHERE task_url LIKE '{job_url}%'
377+
AND job_status=='error'
378+
ORDER BY job_name, test_name
379+
"""
342380
return client.query_dataframe(query)
343381

344382

.github/workflows/release_branches.yml

Lines changed: 52 additions & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -398,51 +398,51 @@ jobs:
398398
##############################################################################################
399399
######################################### STRESS TESTS #######################################
400400
##############################################################################################
401-
StressTestAsan:
402-
needs: [RunConfig, BuilderDebAsan]
403-
if: ${{ !failure() && !cancelled() }}
404-
uses: ./.github/workflows/reusable_test.yml
405-
secrets: inherit
406-
with:
407-
test_name: Stress test (asan)
408-
runner_type: altinity-func-tester
409-
data: ${{ needs.RunConfig.outputs.data }}
410-
StressTestTsan:
411-
needs: [RunConfig, BuilderDebTsan]
412-
if: ${{ !failure() && !cancelled() }}
413-
uses: ./.github/workflows/reusable_test.yml
414-
secrets: inherit
415-
with:
416-
test_name: Stress test (tsan)
417-
runner_type: altinity-func-tester
418-
data: ${{ needs.RunConfig.outputs.data }}
419-
StressTestMsan:
420-
needs: [RunConfig, BuilderDebMsan]
421-
if: ${{ !failure() && !cancelled() }}
422-
uses: ./.github/workflows/reusable_test.yml
423-
secrets: inherit
424-
with:
425-
test_name: Stress test (msan)
426-
runner_type: altinity-func-tester
427-
data: ${{ needs.RunConfig.outputs.data }}
428-
StressTestUBsan:
429-
needs: [RunConfig, BuilderDebUBsan]
430-
if: ${{ !failure() && !cancelled() }}
431-
uses: ./.github/workflows/reusable_test.yml
432-
secrets: inherit
433-
with:
434-
test_name: Stress test (ubsan)
435-
runner_type: altinity-func-tester
436-
data: ${{ needs.RunConfig.outputs.data }}
437-
StressTestDebug:
438-
needs: [RunConfig, BuilderDebDebug]
439-
if: ${{ !failure() && !cancelled() }}
440-
uses: ./.github/workflows/reusable_test.yml
441-
secrets: inherit
442-
with:
443-
test_name: Stress test (debug)
444-
runner_type: altinity-func-tester
445-
data: ${{ needs.RunConfig.outputs.data }}
401+
# StressTestAsan:
402+
# needs: [RunConfig, BuilderDebAsan]
403+
# if: ${{ !failure() && !cancelled() }}
404+
# uses: ./.github/workflows/reusable_test.yml
405+
# secrets: inherit
406+
# with:
407+
# test_name: Stress test (asan)
408+
# runner_type: altinity-func-tester
409+
# data: ${{ needs.RunConfig.outputs.data }}
410+
# StressTestTsan:
411+
# needs: [RunConfig, BuilderDebTsan]
412+
# if: ${{ !failure() && !cancelled() }}
413+
# uses: ./.github/workflows/reusable_test.yml
414+
# secrets: inherit
415+
# with:
416+
# test_name: Stress test (tsan)
417+
# runner_type: altinity-func-tester
418+
# data: ${{ needs.RunConfig.outputs.data }}
419+
# StressTestMsan:
420+
# needs: [RunConfig, BuilderDebMsan]
421+
# if: ${{ !failure() && !cancelled() }}
422+
# uses: ./.github/workflows/reusable_test.yml
423+
# secrets: inherit
424+
# with:
425+
# test_name: Stress test (msan)
426+
# runner_type: altinity-func-tester
427+
# data: ${{ needs.RunConfig.outputs.data }}
428+
# StressTestUBsan:
429+
# needs: [RunConfig, BuilderDebUBsan]
430+
# if: ${{ !failure() && !cancelled() }}
431+
# uses: ./.github/workflows/reusable_test.yml
432+
# secrets: inherit
433+
# with:
434+
# test_name: Stress test (ubsan)
435+
# runner_type: altinity-func-tester
436+
# data: ${{ needs.RunConfig.outputs.data }}
437+
# StressTestDebug:
438+
# needs: [RunConfig, BuilderDebDebug]
439+
# if: ${{ !failure() && !cancelled() }}
440+
# uses: ./.github/workflows/reusable_test.yml
441+
# secrets: inherit
442+
# with:
443+
# test_name: Stress test (debug)
444+
# runner_type: altinity-func-tester
445+
# data: ${{ needs.RunConfig.outputs.data }}
446446
#############################################################################################
447447
############################# INTEGRATION TESTS #############################################
448448
#############################################################################################
@@ -487,7 +487,7 @@ jobs:
487487
#############################################################################################
488488
RegressionTestsRelease:
489489
needs: [RunConfig, BuilderDebRelease]
490-
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.RunConfig.outputs.data).ci_settings.exclude_keywords, 'regression')}}
490+
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.RunConfig.outputs.data).ci_options.exclude_keywords, 'regression')}}
491491
uses: ./.github/workflows/regression.yml
492492
secrets: inherit
493493
with:
@@ -498,7 +498,7 @@ jobs:
498498
timeout_minutes: 300
499499
RegressionTestsAarch64:
500500
needs: [RunConfig, BuilderDebAarch64]
501-
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.RunConfig.outputs.data).ci_settings.exclude_keywords, 'regression') && !contains(fromJson(needs.RunConfig.outputs.data).ci_settings.exclude_keywords, 'aarch64')}}
501+
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.RunConfig.outputs.data).ci_options.exclude_keywords, 'regression') && !contains(fromJson(needs.RunConfig.outputs.data).ci_options.exclude_keywords, 'aarch64')}}
502502
uses: ./.github/workflows/regression.yml
503503
secrets: inherit
504504
with:
@@ -546,11 +546,11 @@ jobs:
546546
- FunctionalStatefulTestTsan
547547
- FunctionalStatefulTestMsan
548548
- FunctionalStatefulTestUBsan
549-
- StressTestDebug
550-
- StressTestAsan
551-
- StressTestTsan
552-
- StressTestMsan
553-
- StressTestUBsan
549+
# - StressTestDebug
550+
# - StressTestAsan
551+
# - StressTestTsan
552+
# - StressTestMsan
553+
# - StressTestUBsan
554554
- IntegrationTestsAsan
555555
- IntegrationTestsTsan
556556
- IntegrationTestsRelease

docs/en/engines/table-engines/integrations/jdbc.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,8 @@ ENGINE = JDBC(datasource_uri, external_database, external_table)
3939

4040
- `external_table` — Name of the table in `external_database` or a select query like `select * from table1 where column1=1`.
4141

42+
- These parameters can also be passed using [named collections](operations/named-collections.md).
43+
4244
## Usage Example {#usage-example}
4345

4446
Creating a table in MySQL server by connecting directly with its console client:

docs/en/engines/table-engines/integrations/odbc.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,8 @@ The table structure can differ from the source table structure:
3838
- `external_database` — Name of a database in an external DBMS.
3939
- `external_table` — Name of a table in the `external_database`.
4040

41+
These parameters can also be passed using [named collections](operations/named-collections.md).
42+
4143
## Usage Example {#usage-example}
4244

4345
**Retrieving data from the local MySQL installation via ODBC**

docs/en/sql-reference/table-functions/jdbc.md

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,19 @@ clickhouse-jdbc-bridge contains experimental codes and is no longer supported. I
1111
ClickHouse recommends using built-in table functions in ClickHouse, which provide a better alternative for ad-hoc querying scenarios (Postgres, MySQL, MongoDB, etc).
1212
:::
1313

14-
`jdbc(datasource, schema, table)` - returns table that is connected via JDBC driver.
14+
JDBC table function returns table that is connected via JDBC driver.
1515

1616
This table function requires a separate [clickhouse-jdbc-bridge](https://github.com/ClickHouse/clickhouse-jdbc-bridge) program to be running.
1717
It supports Nullable types (based on DDL of remote table that is queried).
1818

19+
## Syntax {#syntax}
20+
21+
```sql
22+
jdbc(datasource, schema, table)
23+
jdbc(datasource, table)
24+
jdbc(named_collection)
25+
```
26+
1927
**Examples**
2028

2129
``` sql

docs/en/sql-reference/table-functions/odbc.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,8 @@ Returns table that is connected via [ODBC](https://en.wikipedia.org/wiki/Open_Da
1010

1111
``` sql
1212
odbc(connection_settings, external_database, external_table)
13+
odbc(connection_settings, external_table)
14+
odbc(named_collection)
1315
```
1416

1517
Parameters:
@@ -18,6 +20,8 @@ Parameters:
1820
- `external_database` — Name of a database in an external DBMS.
1921
- `external_table` — Name of a table in the `external_database`.
2022

23+
These parameters can also be passed using [named collections](operations/named-collections.md).
24+
2125
To safely implement ODBC connections, ClickHouse uses a separate program `clickhouse-odbc-bridge`. If the ODBC driver is loaded directly from `clickhouse-server`, driver problems can crash the ClickHouse server. ClickHouse automatically starts `clickhouse-odbc-bridge` when it is required. The ODBC bridge program is installed from the same package as the `clickhouse-server`.
2226

2327
The fields with the `NULL` values from the external table are converted into the default values for the base data type. For example, if a remote MySQL table field has the `INT NULL` type it is converted to 0 (the default value for ClickHouse `Int32` data type).

src/Interpreters/AggregationCommon.h

Lines changed: 12 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -132,6 +132,9 @@ static inline T ALWAYS_INLINE packFixed(
132132
{
133133
size_t index = i;
134134
const IColumn * column = key_columns[j];
135+
auto column_possibly_converted = column->convertToFullColumnIfSparse()->convertToFullColumnIfConst();
136+
column = column_possibly_converted.get();
137+
135138
if constexpr (has_low_cardinality)
136139
{
137140
if (const IColumn * positions = (*low_cardinality_positions)[j])
@@ -224,26 +227,30 @@ static inline T ALWAYS_INLINE packFixed(
224227
if (is_null)
225228
continue;
226229

230+
const IColumn * key_column = key_columns[j];
231+
auto column_possibly_converted = key_column->convertToFullColumnIfSparse()->convertToFullColumnIfConst();
232+
key_column = column_possibly_converted.get();
233+
227234
switch (key_sizes[j])
228235
{
229236
case 1:
230-
memcpy(bytes + offset, static_cast<const ColumnFixedSizeHelper *>(key_columns[j])->getRawDataBegin<1>() + i, 1);
237+
memcpy(bytes + offset, static_cast<const ColumnFixedSizeHelper *>(key_column)->getRawDataBegin<1>() + i, 1);
231238
offset += 1;
232239
break;
233240
case 2:
234-
memcpy(bytes + offset, static_cast<const ColumnFixedSizeHelper *>(key_columns[j])->getRawDataBegin<2>() + i * 2, 2);
241+
memcpy(bytes + offset, static_cast<const ColumnFixedSizeHelper *>(key_column)->getRawDataBegin<2>() + i * 2, 2);
235242
offset += 2;
236243
break;
237244
case 4:
238-
memcpy(bytes + offset, static_cast<const ColumnFixedSizeHelper *>(key_columns[j])->getRawDataBegin<4>() + i * 4, 4);
245+
memcpy(bytes + offset, static_cast<const ColumnFixedSizeHelper *>(key_column)->getRawDataBegin<4>() + i * 4, 4);
239246
offset += 4;
240247
break;
241248
case 8:
242-
memcpy(bytes + offset, static_cast<const ColumnFixedSizeHelper *>(key_columns[j])->getRawDataBegin<8>() + i * 8, 8);
249+
memcpy(bytes + offset, static_cast<const ColumnFixedSizeHelper *>(key_column)->getRawDataBegin<8>() + i * 8, 8);
243250
offset += 8;
244251
break;
245252
default:
246-
memcpy(bytes + offset, static_cast<const ColumnFixedSizeHelper *>(key_columns[j])->getRawDataBegin<1>() + i * key_sizes[j], key_sizes[j]);
253+
memcpy(bytes + offset, static_cast<const ColumnFixedSizeHelper *>(key_column)->getRawDataBegin<1>() + i * key_sizes[j], key_sizes[j]);
247254
offset += key_sizes[j];
248255
}
249256
}

0 commit comments

Comments
 (0)