|
import argparse
import copy
import json
import sys
import typing as t
7 | 7 |
|
8 | 8 |
|
def main() -> None:
    """Parse command-line arguments and run the SARIF filter.

    Reads a SARIF file, keeps only results whose primary location URI starts
    with one of the --include-prefix values, optionally drops results whose
    message text contains any --exclude-text-contains substring, and writes
    the filtered document to --output (stdout when -o/--output is omitted).
    """
    parser = argparse.ArgumentParser()
    # Default to stdout: without a default, omitting -o passes None into
    # process(), which crashes inside json.dump() (None has no .write).
    parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout,
                        help='Output filtered SARIF file')
    parser.add_argument('--include-prefix', required=True, action='append',
                        help='File prefix for source code to include in analysis')
    parser.add_argument('--exclude-text-contains', action='append', default=[],
                        help='Exclude results whose message.text contains this substring (may be repeated)')
    parser.add_argument('input_file', type=argparse.FileType('r'), help='Input SARIF file')
    args = parser.parse_args()
    process(args.input_file, args.output, args.include_prefix, args.exclude_text_contains)
| 19 | + |
| 20 | + |
def process(in_file: t.TextIO, out_file: t.TextIO, include_prefix_list: t.List[str], exclude_text_contains_list: t.List[str]) -> None:
    """Filter a SARIF document and write the result.

    Loads the SARIF JSON from in_file, keeps only the results accepted by
    transform_result(), and dumps an otherwise-identical document (results
    replaced) to out_file.

    Raises NotImplementedError when the document has more than one run.
    """
    document = json.load(in_file)
    if len(document['runs']) != 1:
        raise NotImplementedError('Only 1 run is supported')

    # transform_result() returns None for results that should be dropped.
    candidates = (transform_result(raw, include_prefix_list, exclude_text_contains_list)
                  for raw in document['runs'][0]['results'])
    kept = [candidate for candidate in candidates if candidate is not None]

    out_document = copy.deepcopy(document)
    out_document['runs'][0]['results'] = kept
    json.dump(out_document, out_file, indent=True)
35 | 35 |
|
36 | 36 |
|
def normalize_uri_optional(uri: t.Optional[str], include_prefix_list: t.List[str], strict: bool) -> t.Optional[str]:
    """Strip the first matching include prefix from *uri*.

    Returns None for a None uri. If uri starts with one of the prefixes,
    returns uri with that single leading prefix removed. Otherwise returns
    None when strict is True, or the unchanged uri when strict is False.
    """
    if uri is None:
        return None
    for include_prefix in include_prefix_list:
        if uri.startswith(include_prefix):
            # Slice off the prefix rather than str.replace(): replace()
            # removes EVERY occurrence of the prefix string, mangling URIs
            # such as 'src/a/src/b' (prefix 'src/') into 'a/b'.
            return uri[len(include_prefix):]
    return None if strict else uri
| 44 | + |
| 45 | + |
def message_contains_any(text: str, substrings: t.List[str]) -> bool:
    """Return True when any element of *substrings* occurs in *text*."""
    for candidate in substrings:
        if candidate in text:
            return True
    return False
| 48 | + |
| 49 | + |
def dedupe_related_locations(related_locations: t.Any, include_prefix_list: t.List[str]) -> t.List[t.Dict[str, t.Any]]:
    """Normalize URIs in a SARIF relatedLocations list and drop duplicates.

    A non-list or empty input yields []. Entries that are not dicts are
    skipped. Each kept entry has its artifactLocation uri rewritten in
    place with a non-strict prefix strip, and entries are deduplicated on
    (message text, normalized uri, startLine, startColumn), keeping the
    first occurrence in order.
    """
    if not isinstance(related_locations, list) or not related_locations:
        return []
    seen: t.Set[t.Tuple[t.Any, ...]] = set()
    kept: t.List[t.Dict[str, t.Any]] = []
    for entry in related_locations:
        if not isinstance(entry, dict):
            continue
        message_text = entry['message']['text']
        physical = entry['physicalLocation']
        normalized_uri = normalize_uri_optional(physical['artifactLocation']['uri'], include_prefix_list, strict=False)
        # In-place rewrite is deliberate: the caller hands over a deep copy.
        physical['artifactLocation']['uri'] = normalized_uri
        fingerprint = (message_text,
                       normalized_uri,
                       physical['region']['startLine'],
                       physical['region']['startColumn'])
        if fingerprint not in seen:
            seen.add(fingerprint)
            kept.append(entry)
    return kept
| 71 | + |
| 72 | + |
def transform_result(result: t.Dict[str, t.Any], include_prefix_list: t.List[str], exclude_text_contains_list: t.List[str]) -> t.Optional[t.Dict[str, t.Any]]:
    """Filter and rewrite a single SARIF result.

    Returns None when the result's single location does not start with any
    include prefix (or normalizes to an empty uri), or when its message
    text contains an excluded substring. Otherwise returns a deep copy of
    the result with the location uri normalized and relatedLocations
    deduplicated.

    Raises NotImplementedError when the result has more than one location.
    """
    location_list = result['locations']
    if len(location_list) != 1:
        raise NotImplementedError('Only 1 location is supported')
    original_uri = location_list[0]['physicalLocation']['artifactLocation']['uri']
    normalized_uri = normalize_uri_optional(original_uri, include_prefix_list, strict=True)
    if not normalized_uri:
        return None
    if message_contains_any(result['message']['text'], exclude_text_contains_list):
        return None
    rewritten = copy.deepcopy(result)
    rewritten['locations'][0]['physicalLocation']['artifactLocation']['uri'] = normalized_uri
    deduped = dedupe_related_locations(rewritten.get('relatedLocations'), include_prefix_list)
    if deduped:
        rewritten['relatedLocations'] = deduped
    elif 'relatedLocations' in rewritten:
        # Keep the key present (as an empty list) when the input had it.
        rewritten['relatedLocations'] = []
    return rewritten
45 | 94 |
|
46 | 95 |
|
47 | 96 | if __name__ == '__main__': |
|
0 commit comments