Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit 27ca4f0

Browse files
committed May 15, 2024
scripts: ci: Detect API-breaking changes in the PRs
This script will check the PR's changes in the header files and DTS bindings that may affect compatibility of the public API. This will reduce the risk of accidentally breaking the API. A workflow that runs on each PR will add a comment with an analysis summary. Signed-off-by: Dominik Kilian <Dominik.Kilian@nordicsemi.no>
1 parent cd66e53 commit 27ca4f0

22 files changed

+2288
-0
lines changed
 

‎.github/workflows/api-check.yml

+129
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,129 @@
1+
name: API Check
2+
3+
on:
4+
pull_request:
5+
branches:
6+
- main
7+
workflow_dispatch:
8+
inputs:
9+
new_commit:
10+
type: string
11+
required: true
12+
description: New Commit
13+
old_commit:
14+
type: string
15+
required: true
16+
description: Old Commit
17+
18+
jobs:
19+
build:
20+
runs-on: ubuntu-latest
21+
concurrency:
22+
group: ${{ github.workflow }}-${{ github.ref }}
23+
cancel-in-progress: true
24+
steps:
25+
- name: Checkout sources
26+
uses: nordicbuilder/action-checkout-west-update@main
27+
with:
28+
git-fetch-depth: 0
29+
west-update-args: ''
30+
31+
- name: cache-pip
32+
uses: actions/cache@v3
33+
with:
34+
path: ~/.cache/pip
35+
key: ${{ runner.os }}-doc-pip
36+
37+
- name: Git rebase
38+
if: github.event_name == 'pull_request'
39+
env:
40+
BASE_REF: ${{ github.base_ref }}
41+
working-directory: ncs/nrf
42+
run: |
43+
git remote -v
44+
git branch
45+
git rebase origin/${BASE_REF}
46+
# debug
47+
git log --pretty=oneline -n 5
48+
49+
- name: Install packages
50+
run: |
51+
sudo apt update
52+
sudo apt-get install -y ninja-build mscgen plantuml
53+
sudo snap install yq
54+
DOXYGEN_VERSION=$(yq ".doxygen.version" ./ncs/nrf/scripts/tools-versions-linux.yml)
55+
wget --no-verbose "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN_VERSION//./_}/doxygen-${DOXYGEN_VERSION}.linux.bin.tar.gz"
56+
tar xf doxygen-${DOXYGEN_VERSION}.linux.bin.tar.gz
57+
echo "${PWD}/doxygen-${DOXYGEN_VERSION}/bin" >> $GITHUB_PATH
58+
cp -r ncs/nrf/scripts/ci/api_check .
59+
60+
- name: Install Python dependencies
61+
working-directory: ncs
62+
run: |
63+
sudo pip3 install -U setuptools wheel pip
64+
pip3 install -r nrf/doc/requirements.txt
65+
pip3 install -r ../api_check/requirements.txt
66+
67+
- name: West zephyr-export
68+
working-directory: ncs
69+
run: |
70+
west zephyr-export
71+
72+
- name: Checkout new commit and west update
73+
if: github.event_name == 'workflow_dispatch'
74+
working-directory: ncs/nrf
75+
run: |
76+
git checkout ${{ github.event.inputs.new_commit }}
77+
west update
78+
79+
- name: Collect data from new commit
80+
working-directory: ncs/nrf
81+
run: |
82+
source ../zephyr/zephyr-env.sh
83+
echo =========== NEW COMMIT ===========
84+
git log -n 1
85+
cmake -GNinja -Bdoc/_build -Sdoc
86+
python3 ../../api_check/utils/interrupt_on.py "syncing doxygen output" ninja -C doc/_build nrf
87+
python3 ../../api_check/headers doc/_build/nrf/doxygen/xml --save-input ../../headers-new.pkl
88+
python3 ../../api_check/dts -n - --save-input ../../dts-new.pkl
89+
rm -Rf doc/_build
90+
91+
- name: Checkout old commit and west update
92+
working-directory: ncs/nrf
93+
run: |
94+
git checkout ${{ github.event.inputs.old_commit }}${{ github.base_ref }}
95+
cd ..
96+
west update
97+
98+
- name: Collect data from old commit
99+
working-directory: ncs/nrf
100+
run: |
101+
source ../zephyr/zephyr-env.sh
102+
echo =========== OLD COMMIT ===========
103+
git log -n 1
104+
cmake -GNinja -Bdoc/_build -Sdoc
105+
python3 ../../api_check/utils/interrupt_on.py "syncing doxygen output" ninja -C doc/_build nrf
106+
python3 ../../api_check/headers doc/_build/nrf/doxygen/xml --save-input ../../headers-old.pkl
107+
python3 ../../api_check/dts -n - --save-input ../../dts-old.pkl
108+
109+
- name: Check
110+
working-directory: ncs/nrf
111+
run: |
112+
python3 ../../api_check/headers --format github --resolve-paths . --relative-to . --save-stats ../../headers-stats.json ../../headers-new.pkl ../../headers-old.pkl || true
113+
python3 ../../api_check/dts --format github --relative-to . --save-stats ../../dts-stats.json -n ../../dts-new.pkl -o ../../dts-old.pkl || true
114+
echo Headers stats
115+
cat ../../headers-stats.json || true
116+
echo DTS stats
117+
cat ../../dts-stats.json || true
118+
119+
- name: Update PR
120+
if: github.event_name == 'pull_request'
121+
working-directory: ncs/nrf
122+
env:
123+
PR_NUMBER: ${{ github.event.number }}
124+
GITHUB_ACTOR: ${{ github.actor }}
125+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
126+
GITHUB_REPO: ${{ github.repository }}
127+
GITHUB_RUN_ID: ${{ github.run_id }}
128+
run: |
129+
python3 ../../api_check/pr ../../headers-stats.json ../../dts-stats.json

‎scripts/ci/api_check/dts/__main__.py

+2
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
from main import main
2+
main()

‎scripts/ci/api_check/dts/args.py

+66
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
import sys
6+
import argparse
7+
from pathlib import Path
8+
9+
10+
class ArgsClass:
11+
new: 'list[list[str]]'
12+
old: 'list[list[str]]|None'
13+
format: str
14+
relative_to: 'Path | None'
15+
save_stats: 'Path | None'
16+
save_input: 'Path | None'
17+
save_old_input: 'Path | None'
18+
dump_json: 'Path | None'
19+
20+
21+
def parse_args() -> ArgsClass:
22+
parser = argparse.ArgumentParser(add_help=False,
23+
description='Detect DTS binding changes.')
24+
parser.add_argument('-n', '--new', nargs='+', action='append', required=True,
25+
help='List of directories where to search the new DTS binding. ' +
26+
'The "-" will use the "ZEPHYR_BASE" environment variable to find ' +
27+
'DTS binding in default directories.')
28+
parser.add_argument('-o', '--old', nargs='+', action='append',
29+
help='List of directories where to search the old DTS binding. ' +
30+
'The "-" will use the "ZEPHYR_BASE" environment variable to find ' +
31+
'DTS binding in default directories. You should skip this if you ' +
32+
'want to pre-parse the input with the "--save-input" option.')
33+
parser.add_argument('--format', choices=('text', 'github'), default='text',
34+
help='Output format. Default is "text".')
35+
parser.add_argument('--relative-to', type=Path,
36+
help='Show relative paths in messages.')
37+
parser.add_argument('--save-stats', type=Path,
38+
help='Save statistics to JSON file.')
39+
parser.add_argument('--save-input', metavar='FILE', type=Path,
40+
help='Pre-parse and save the new input to a file. The file format may change ' +
41+
'from version to version. Use always the same version ' +
42+
'of this tool for one file.')
43+
parser.add_argument('--save-old-input', metavar='FILE', type=Path,
44+
help='Pre-parse and save the old input to a file.')
45+
parser.add_argument('--dump-json', metavar='FILE', type=Path,
46+
help='Dump input data to a JSON file (only for debug purposes).')
47+
parser.add_argument('--help', action='help',
48+
help='Show this help and exit.')
49+
args: ArgsClass = parser.parse_args()
50+
51+
if (args.old is None) and (args.save_input is None):
52+
parser.print_usage()
53+
print('error: at least one of the following arguments is required: old-input, --save-input', file=sys.stderr)
54+
sys.exit(2)
55+
56+
args.relative_to = args.relative_to.absolute() if args.relative_to else None
57+
58+
return args
59+
60+
61+
args: ArgsClass = parse_args()
62+
63+
64+
if __name__ == '__main__':
65+
import json
66+
print(json.dumps(args.__dict__, indent=4, default=lambda x: str(x)))
+110
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,110 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
import sys
6+
import pickle
7+
from pathlib import Path
8+
from utils import devicetree_sources, warning
9+
10+
if devicetree_sources:
11+
sys.path.insert(0, devicetree_sources)
12+
13+
from devicetree import edtlib
14+
15+
16+
class ParseResult:
17+
bindings: 'list[Binding]'
18+
binding_by_name: 'dict[str, Binding]'
19+
def __init__(self):
20+
self.bindings = []
21+
self.binding_by_name = {}
22+
23+
class Property:
24+
name: str
25+
type: str
26+
description: str
27+
enum: 'set[str]'
28+
const: 'str | None'
29+
default: 'str | None'
30+
deprecated: bool
31+
required: bool
32+
specifier_space: str
33+
34+
def __init__(self, prop: edtlib.PropertySpec):
35+
self.name = prop.name
36+
self.type = prop.type or ''
37+
self.description = prop.description or ''
38+
self.enum = set([ str(x) for x in (prop.enum or []) ])
39+
self.const = str(prop.const) if prop.const else None
40+
self.default = str(prop.default) if prop.default else None
41+
self.deprecated = prop.deprecated or False
42+
self.required = prop.required or False
43+
self.specifier_space = str(prop.specifier_space or '')
44+
45+
class Binding:
46+
path: str
47+
name: str
48+
description: str
49+
cells: str
50+
buses: str
51+
properties: 'dict[str, Property]'
52+
53+
def __init__(self, binding: edtlib.Binding, file: Path):
54+
self.path = str(file)
55+
self.name = binding.compatible or self.path
56+
if binding.on_bus is not None:
57+
self.name += '@' + binding.on_bus
58+
self.description = binding.description or ''
59+
cells_array = [
60+
f'{name}={";".join(value)}' for name, value in (binding.specifier2cells or {}).items()
61+
]
62+
cells_array.sort()
63+
self.cells = '&'.join(cells_array)
64+
busses_array = list(binding.buses or [])
65+
busses_array.sort()
66+
self.buses = ';'.join(busses_array)
67+
self.properties = {}
68+
for key, value in (binding.prop2specs or {}).items():
69+
prop = Property(value)
70+
self.properties[key] = prop
71+
72+
73+
def get_binding_files(bindings_dirs: 'list[Path]') -> 'list[Path]':
74+
binding_files = []
75+
for bindings_dir in bindings_dirs:
76+
if not bindings_dir.is_dir():
77+
raise FileNotFoundError(f'Bindings directory "{bindings_dir}" not found.')
78+
for file in bindings_dir.glob('**/*.yaml'):
79+
binding_files.append(file)
80+
for file in bindings_dir.glob('**/*.yml'):
81+
binding_files.append(file)
82+
return binding_files
83+
84+
85+
def parse_bindings(dirs_or_pickle: 'list[Path]|Path') -> ParseResult:
86+
result = ParseResult()
87+
if isinstance(dirs_or_pickle, list):
88+
yaml_files = get_binding_files(dirs_or_pickle)
89+
fname2path: 'dict[str, str]' = {
90+
path.name: str(path) for path in yaml_files
91+
}
92+
for binding_file in yaml_files:
93+
try:
94+
binding = Binding(edtlib.Binding(str(binding_file), fname2path, None, False, False), binding_file)
95+
if binding.name in result.binding_by_name:
96+
warning(f'Repeating binding {binding.name}: {binding.path} {result.binding_by_name[binding.name].path}')
97+
result.bindings.append(binding)
98+
result.binding_by_name[binding.name] = binding
99+
except edtlib.EDTError as err:
100+
warning(err)
101+
else:
102+
with open(dirs_or_pickle, 'rb') as fd:
103+
result = pickle.load(fd)
104+
return result
105+
106+
107+
def save_bindings(parse_result: ParseResult, file: Path):
108+
with open(file, 'wb') as fd:
109+
pickle.dump(parse_result, fd)
110+

‎scripts/ci/api_check/dts/compare.py

+154
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,154 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
from typing import Any
6+
from bindings_parser import Binding, Property, ParseResult
7+
8+
9+
UNCHANGED = 'unchanged'
10+
ADDED = 'added'
11+
DELETED = 'deleted'
12+
MODIFIED = 'modified'
13+
SET = 'set'
14+
CLEARED = 'cleared'
15+
16+
17+
class AnyChange:
18+
kind: str
19+
action: str
20+
def __init__(self, action: str, new: Any, old: Any):
21+
self.action = action
22+
self.new = new
23+
self.old = old
24+
25+
26+
class BindingChange(AnyChange):
27+
kind = 'binding'
28+
new: Binding
29+
old: Binding
30+
path: bool = False
31+
description: bool = False
32+
cells: bool = False
33+
buses: bool = False
34+
properties: 'list[PropertyChange]'
35+
def __init__(self, action: str, new: Any, old: Any):
36+
super().__init__(action, new, old)
37+
self.properties = []
38+
39+
40+
class PropertyChange(AnyChange):
41+
kind = 'property'
42+
new: Property
43+
old: Property
44+
type: bool = False
45+
description: bool = False
46+
enum: 'list[EnumChange]'
47+
const: str = UNCHANGED
48+
default: str = UNCHANGED
49+
deprecated: str = UNCHANGED
50+
required: str = UNCHANGED
51+
specifier_space: bool = False
52+
def __init__(self, action: str, new: Any, old: Any):
53+
super().__init__(action, new, old)
54+
self.enum = []
55+
56+
57+
class EnumChange(AnyChange):
58+
kind = 'enum'
59+
new: str
60+
old: str
61+
62+
63+
def get_str_action(new: 'str | None', old: 'str | None') -> str:
64+
if (new is None) and (old is None):
65+
return UNCHANGED
66+
elif (new is None) and (old is not None):
67+
return DELETED
68+
elif (new is not None) and (old is None):
69+
return ADDED
70+
else:
71+
return MODIFIED if new != old else UNCHANGED
72+
73+
74+
def get_bool_action(new: bool, old: bool) -> str:
75+
if (not new) and old:
76+
return CLEARED
77+
elif new and (not old):
78+
return SET
79+
elif (new is not None) and (old is None):
80+
return UNCHANGED
81+
82+
83+
def compare_properties(new: 'dict[str, Property]', old: 'dict[str, Property]') -> 'list[PropertyChange]':
84+
new_keys = set(new.keys())
85+
old_keys = set(old.keys())
86+
added_keys = new_keys.difference(old_keys)
87+
deleted_keys = old_keys.difference(new_keys)
88+
remaining_keys = new_keys.intersection(old_keys)
89+
result: 'list[PropertyChange]' = []
90+
for key in added_keys:
91+
property_change = PropertyChange(ADDED, new[key], new[key])
92+
result.append(property_change)
93+
for key in deleted_keys:
94+
property_change = PropertyChange(DELETED, old[key], old[key])
95+
result.append(property_change)
96+
for key in remaining_keys:
97+
new_property = new[key]
98+
old_property = old[key]
99+
property_change = PropertyChange(MODIFIED, new[key], old[key])
100+
property_change.type = new_property.type != old_property.type
101+
property_change.description = new_property.description != old_property.description
102+
property_change.const = get_str_action(new_property.const, old_property.const)
103+
property_change.default = get_str_action(new_property.default, old_property.default)
104+
property_change.deprecated = get_bool_action(new_property.deprecated, old_property.deprecated)
105+
property_change.required = get_bool_action(new_property.required, old_property.required)
106+
property_change.specifier_space = new_property.specifier_space != old_property.specifier_space
107+
for enum_value in new_property.enum.difference(old_property.enum):
108+
property_change.enum.append(EnumChange(ADDED, enum_value, enum_value))
109+
for enum_value in old_property.enum.difference(new_property.enum):
110+
property_change.enum.append(EnumChange(DELETED, enum_value, enum_value))
111+
changed = (
112+
property_change.type or
113+
property_change.description or
114+
property_change.const != UNCHANGED or
115+
property_change.default != UNCHANGED or
116+
property_change.deprecated != UNCHANGED or
117+
property_change.required != UNCHANGED or
118+
property_change.specifier_space or
119+
len(property_change.enum))
120+
if changed:
121+
result.append(property_change)
122+
return result
123+
124+
125+
def compare(new: ParseResult, old: ParseResult) -> 'list[BindingChange]':
126+
new_keys = set(new.binding_by_name.keys())
127+
old_keys = set(old.binding_by_name.keys())
128+
added_keys = new_keys.difference(old_keys)
129+
deleted_keys = old_keys.difference(new_keys)
130+
remaining_keys = new_keys.intersection(old_keys)
131+
result: 'list[BindingChange]' = []
132+
for key in added_keys:
133+
binding_change = BindingChange(ADDED, new.binding_by_name[key], new.binding_by_name[key])
134+
result.append(binding_change)
135+
for key in deleted_keys:
136+
binding_change = BindingChange(DELETED, old.binding_by_name[key], old.binding_by_name[key])
137+
result.append(binding_change)
138+
for key in remaining_keys:
139+
new_binding = new.binding_by_name[key]
140+
old_binding = old.binding_by_name[key]
141+
binding_change = BindingChange(MODIFIED, new.binding_by_name[key], old.binding_by_name[key])
142+
binding_change.path = new_binding.path != old_binding.path
143+
binding_change.description = new_binding.description != old_binding.description
144+
binding_change.buses = new_binding.buses != old_binding.buses
145+
binding_change.cells = new_binding.cells != old_binding.cells
146+
binding_change.properties = compare_properties(new_binding.properties, old_binding.properties)
147+
changed = (binding_change.path or
148+
binding_change.description or
149+
binding_change.buses or
150+
binding_change.cells or
151+
len(binding_change.properties))
152+
if changed:
153+
result.append(binding_change)
154+
return result

‎scripts/ci/api_check/dts/main.py

+132
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,132 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
import os
6+
import sys
7+
import json
8+
import out_text
9+
from json import JSONEncoder
10+
from pathlib import Path
11+
from utils import error
12+
from args import args
13+
from bindings_parser import parse_bindings, save_bindings
14+
from compare import compare
15+
16+
17+
def collect_zephyr_inputs() -> 'list[Path]':
18+
zephyr_base = os.getenv('ZEPHYR_BASE')
19+
if zephyr_base is None:
20+
error('You must specify "ZEPHYR_BASE" if you using "-" argument as an input.')
21+
sys.exit(1)
22+
zephyr_base = Path(zephyr_base)
23+
west_root = zephyr_base.parent
24+
bindings_dirs: 'list[Path]' = []
25+
for bindings_dir in west_root.glob('**/dts/bindings'):
26+
rel = bindings_dir.relative_to(west_root)
27+
if str(rel).count('test') or str(rel).count('sample'):
28+
continue
29+
bindings_dirs.append(bindings_dir)
30+
return bindings_dirs
31+
32+
33+
def collect_inputs(arguments: 'list[list[str]]') -> 'list[Path]|Path':
34+
result_list: 'list[Path]' = []
35+
result_pickle: 'Path|None' = None
36+
for arg_list in arguments:
37+
for arg in arg_list:
38+
if arg == '-':
39+
result_list.extend(collect_zephyr_inputs())
40+
else:
41+
arg = Path(arg)
42+
if arg.is_file():
43+
result_pickle = arg
44+
else:
45+
result_list.append(arg)
46+
if len(result_list) and result_pickle is not None:
47+
error('Expecting pickled file or list of directories. Not both.')
48+
sys.exit(1)
49+
return result_pickle or result_list
50+
51+
52+
def dump_json(file: Path, **kwargs):
53+
def default_encode(o):
54+
this_id = id(o)
55+
if this_id in ids:
56+
return f'__id__{this_id}'
57+
if isinstance(o, set):
58+
return list(o)
59+
else:
60+
ids.add(this_id)
61+
d = {'__id__': f'__id__{this_id}'}
62+
for name in tuple(dir(o)):
63+
if not name.startswith('_'):
64+
value = getattr(o, name)
65+
if not callable(value):
66+
d[name] = value
67+
return d
68+
ids = set()
69+
with open(file, 'w') as fd:
70+
fd.write('{\n')
71+
first = True
72+
for name, value in kwargs.items():
73+
json = JSONEncoder(sort_keys=False, indent=2, default=default_encode).encode(value)
74+
if not first:
75+
fd.write(',\n')
76+
fd.write(f'"{name}": {json}')
77+
first = False
78+
fd.write('\n}\n')
79+
80+
def main():
81+
new_input = parse_bindings(collect_inputs(args.new))
82+
83+
if args.old:
84+
old_input = parse_bindings(collect_inputs(args.old))
85+
else:
86+
old_input = None
87+
88+
if args.save_input:
89+
save_bindings(new_input, args.save_input)
90+
91+
if args.save_old_input and old_input:
92+
save_bindings(old_input, args.save_old_input)
93+
94+
if args.dump_json:
95+
if old_input:
96+
dump_json(args.dump_json,
97+
new_bindings=new_input.bindings,
98+
new_binding_by_name=new_input.binding_by_name,
99+
old_bindings=old_input.bindings,
100+
old_binding_by_name=old_input.binding_by_name)
101+
else:
102+
dump_json(args.dump_json,
103+
bindings=new_input.bindings,
104+
binding_by_name=new_input.binding_by_name)
105+
106+
level = 0
107+
108+
if old_input:
109+
changes = compare(new_input, old_input)
110+
if args.dump_json:
111+
dump_json(args.dump_json,
112+
new_bindings=new_input.bindings,
113+
new_binding_by_name=new_input.binding_by_name,
114+
old_bindings=old_input.bindings,
115+
old_binding_by_name=old_input.binding_by_name,
116+
changes=changes)
117+
stats = out_text.generate(changes)
118+
if args.save_stats:
119+
args.save_stats.write_text(json.dumps({
120+
'notice': stats[1],
121+
'warning': stats[2],
122+
'critical': stats[3],
123+
}, indent=2))
124+
for i in range(len(stats)):
125+
if stats[i] > 0:
126+
level = i
127+
128+
sys.exit(level)
129+
130+
131+
if __name__ == '__main__':
132+
main()

‎scripts/ci/api_check/dts/out_text.py

+155
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,155 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
import re
6+
from pathlib import Path
7+
from jinja2 import Template
8+
from compare import AnyChange, BindingChange
9+
from utils import compile_messages
10+
from args import args
11+
12+
13+
spaces_re = re.compile(r'\s+')
14+
15+
16+
messages: 'dict[str, Template]' = compile_messages({
17+
'binding-added': 'ignore',
18+
'binding-deleted': 'critical: Binding "{{new.name}}" deleted.',
19+
'binding-modified-path': 'ignore',
20+
'binding-modified-description': 'notice: Binding "{{new.name}}" description changed.',
21+
'binding-modified-cells': 'notice: Binding "{{new.name}}" cells definition changed from "{{old.cells}}" to "{{new.cells}}".',
22+
'binding-modified-buses': 'warning: Binding "{{new.name}}" buses definition changed from "{{old.buses}}" to "{{new.buses}}".',
23+
'property-added': '''
24+
{% if new.const is not none %}
25+
ignore
26+
{% elif new.default is not none %}
27+
notice: Property "{{new.name}}" of "{{binding.new.name}}" added with default value.
28+
{% elif not new.required %}
29+
warning: Property "{{new.name}}" of "{{binding.new.name}}" added, but it is not required.
30+
{% else %}
31+
critical: Required property "{{new.name}}" of "{{binding.new.name}}" added.
32+
{% endif %}
33+
''',
34+
'property-deleted': '''
35+
{% if new.deprecated %}
36+
notice: Deprecated property "{{new.name}}" of "{{binding.new.name}}" deleted.
37+
{% else %}
38+
critical: Property "{{new.name}}" of "{{binding.new.name}}" deleted.
39+
{% endif %}
40+
''',
41+
'property-modified-type': '{% if new.const is not none %} critical: Property "{{new.name}}" of "{{binding.new.name}}" type changed. {% endif %}',
42+
'property-modified-description': '{% if new.const is not none %} notice: Property "{{new.name}}" of "{{binding.new.name}}" description changed. {% endif %}',
43+
'property-modified-const-added': 'critical: Property "{{new.name}}" of "{{binding.new.name}}" const value set.',
44+
'property-modified-const-deleted': '''
45+
{% if new.default is none %}
46+
critical: Property "{{new.name}}" of "{{binding.new.name}}" const value removed.
47+
{% else %}
48+
notice: Property "{{new.name}}" of "{{binding.new.name}}" const value replaced by default value.
49+
{% endif %}
50+
''',
51+
'property-modified-const-modified': 'ignore',
52+
'property-modified-default-added': 'ignore',
53+
'property-modified-default-deleted': 'critical: Property "{{new.name}}" of "{{binding.new.name}}" default value removed.',
54+
'property-modified-default-modified': 'critical: Property "{{new.name}}" of "{{binding.new.name}}" default value modified.',
55+
'property-modified-deprecated-set': 'ignore',
56+
'property-modified-deprecated-cleared': 'ignore',
57+
'property-modified-required-set': 'critical: Property "{{new.name}}" of "{{binding.new.name}}" is now required.',
58+
'property-modified-required-cleared': 'ignore',
59+
'property-modified-specifier_space': '{% if new.const is not none %} warning: Property "{{new.name}}" of "{{binding.new.name}}" specifier space changed. {% endif %}',
60+
'enum-added': 'ignore',
61+
'enum-deleted': 'critical: Enum value "{{new}}" of property "{{property.new.name}}" of "{{binding.new.name}}" deleted.',
62+
})
63+
64+
65+
def get_message_level(message: str) -> int:
66+
if message.startswith('ignore') or (message == ''):
67+
return 0
68+
elif message.startswith('notice'):
69+
return 1
70+
elif message.startswith('warning'):
71+
return 2
72+
elif message.startswith('critical'):
73+
return 3
74+
else:
75+
raise ValueError(f'Unknown level of message: {message}')
76+
77+
github_commands = [
78+
'::ignore',
79+
'::notice',
80+
'::warning',
81+
'::error'
82+
]
83+
84+
github_titles = [
85+
'Ignore',
86+
'Notice',
87+
'Warning',
88+
'Critical',
89+
]
90+
91+
def encode(text: str, is_message: bool):
92+
if is_message:
93+
return text.replace('%', '%25').replace('\r', '%0D').replace('\n', '%0A')
94+
else:
95+
return text.replace('%', '%25').replace('\r', '%0D').replace('\n', '%0A').replace(',', '%2C').replace('::', '%3A%3A')
96+
97+
def show_message(file: Path, line: 'str | int | None', message: str, level: int):
98+
if args.relative_to is not None:
99+
file = file.relative_to(args.relative_to)
100+
if args.format == 'github':
101+
command = github_commands[level]
102+
title = f'Compatibility {github_titles[level]}'
103+
if line is not None:
104+
print(f'{command} file={encode(str(file), False)},line={line},title={title}::{encode(message, True)}')
105+
else:
106+
print(f'{command} file={encode(str(file), False)},title={title}::{encode(message, True)}')
107+
elif line is not None:
108+
print(f'{file}:{line}: {message}')
109+
else:
110+
print(f'{file}: {message}')
111+
112+
113+
def generate_changes(stats: 'list[int]', changes: 'list[AnyChange]',
114+
location: Path, **kwargs) -> int:
115+
max_level = 0
116+
for change in changes:
117+
loc = Path(change.new.path) if isinstance(change, BindingChange) else location
118+
prefix = f'{change.kind}-{change.action}'
119+
for key, template in messages.items():
120+
if not key.startswith(prefix):
121+
continue
122+
matched = False
123+
if key == prefix:
124+
matched = True
125+
else:
126+
parts = key[len(prefix) + 1:].split('-')
127+
field = parts[0]
128+
expected = parts[1] if (len(parts) > 1) else True
129+
value = getattr(change, field)
130+
if value == expected:
131+
matched = True
132+
if not matched:
133+
continue
134+
data = {}
135+
for name in dir(change):
136+
value = getattr(change, name)
137+
if (not callable(value)) and (not name.startswith('_')):
138+
data[name] = value
139+
for name, value in kwargs.items():
140+
data[name] = value
141+
message = spaces_re.sub(template.render(**data), ' ').strip()
142+
level = get_message_level(message)
143+
if level > 0:
144+
show_message(loc, None, message, level)
145+
stats[level] += 1
146+
if prefix == 'binding-modified':
147+
generate_changes(stats, change.properties, loc, binding=change)
148+
elif prefix == 'property-modified':
149+
generate_changes(stats, change.enum, loc, property=change, **kwargs)
150+
151+
152+
def generate(compare_result: 'list[BindingChange]'):
153+
stats = [0, 0, 0, 0]
154+
generate_changes(stats, compare_result, Path())
155+
return stats

‎scripts/ci/api_check/dts/utils.py

+45
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
import os
6+
import sys
7+
from typing import Callable, Iterable
8+
from pathlib import Path
9+
from jinja2 import Template
10+
11+
12+
def warning(*args, **kwargs):
13+
args = ('\x1B[33mwarning:\x1B[0m', *args)
14+
print(*args, **kwargs, file=sys.stderr)
15+
16+
17+
def error(*args, **kwargs):
18+
args = ('\x1B[31merror:\x1B[0m', *args)
19+
print(*args, **kwargs, file=sys.stderr)
20+
21+
22+
def compile_messages(messages):
23+
result = {}
24+
for key in messages.keys():
25+
result[key] = Template(messages[key])
26+
return result
27+
28+
29+
def find_devicetree_sources() -> 'str|None':
30+
sources = None
31+
zephyr_base = os.getenv('ZEPHYR_BASE')
32+
if zephyr_base is not None:
33+
zephyr_base = Path(zephyr_base)
34+
sources = zephyr_base / 'scripts/dts/python-devicetree/src'
35+
if sources.exists():
36+
return str(sources)
37+
west_root = Path(__file__).parent.parent.absolute()
38+
for i in range(0, 6):
39+
sources = west_root / 'zephyr/scripts/dts/python-devicetree/src'
40+
if sources.exists():
41+
return str(sources)
42+
west_root = west_root.parent
43+
return None
44+
45+
devicetree_sources = find_devicetree_sources()
+2
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
from main import main
2+
main()

‎scripts/ci/api_check/headers/args.py

+65
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
import sys
6+
import argparse
7+
from pathlib import Path
8+
9+
10+
class ArgsClass:
11+
new_input: Path
12+
old_input: 'Path | None'
13+
format: str
14+
resolve_paths: 'Path | None'
15+
relative_to: 'Path | None'
16+
save_stats: 'Path | None'
17+
save_input: 'Path | None'
18+
save_old_input: 'Path | None'
19+
dump_json: 'Path | None'
20+
21+
22+
def parse_args() -> ArgsClass:
23+
parser = argparse.ArgumentParser(add_help=False,
24+
description='Detect API changes based on doxygen XML output.')
25+
parser.add_argument('new_input', metavar='new-input', type=Path,
26+
help='The directory containing doxygen XML output or pre-parsed file with ' +
27+
'the new API version. For details about ' +
28+
'doxygen XML output, see https://www.doxygen.nl/manual/output.html.')
29+
parser.add_argument('old_input', metavar='old-input', nargs='?', type=Path,
30+
help='The directory containing doxygen XML output or pre-parsed file with ' +
31+
'the old API version. You should skip this if you want to pre-parse ' +
32+
'the input with the "--save-input" option.')
33+
parser.add_argument('--format', choices=('text', 'github'), default='text',
34+
help='Output format. Default is "text".')
35+
parser.add_argument('--resolve-paths', type=Path,
36+
help='Resolve relative paths from doxygen input using this parameter as ' +
37+
'base directory.')
38+
parser.add_argument('--relative-to', type=Path,
39+
help='Show relative paths in messages.')
40+
parser.add_argument('--save-stats', type=Path,
41+
help='Save statistics to JSON file.')
42+
parser.add_argument('--save-input', metavar='FILE', type=Path,
43+
help='Pre-parse and save the "new-input" to a file. The file format may change ' +
44+
'from version to version. Use always the same version ' +
45+
'of this tool for one file.')
46+
parser.add_argument('--save-old-input', metavar='FILE', type=Path,
47+
help='Pre-parse and save the "old-input" to a file.')
48+
parser.add_argument('--dump-json', metavar='FILE', type=Path,
49+
help='Dump input data to a JSON file (only for debug purposes).')
50+
parser.add_argument('--help', action='help',
51+
help='Show this help and exit.')
52+
args: ArgsClass = parser.parse_args()
53+
54+
if (args.old_input is None) and (args.save_input is None):
55+
parser.print_usage()
56+
print('error: at least one of the following arguments is required: old-input, --save-input')
57+
sys.exit(2)
58+
59+
args.resolve_paths = args.resolve_paths.absolute() if args.resolve_paths else None
60+
args.relative_to = args.relative_to.absolute() if args.relative_to else None
61+
62+
return args
63+
64+
65+
args: ArgsClass = parse_args()

‎scripts/ci/api_check/headers/compare.py

+407
Large diffs are not rendered by default.

‎scripts/ci/api_check/headers/dox_parser.py

+379
Large diffs are not rendered by default.

‎scripts/ci/api_check/headers/main.py

+56
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,56 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
import sys
6+
import json
7+
import out_text
8+
from args import args
9+
from compare import compare
10+
from dox_parser import dump_doxygen_json, parse_doxygen, save_doxygen
11+
12+
13+
def main():
    """Entry point: parse doxygen input(s), optionally save/dump them,
    compare old vs. new API and exit with the highest detected severity
    level (0=ok, 1=notice, 2=warning, 3=critical).
    """
    new_input = parse_doxygen(args.new_input)
    old_input = parse_doxygen(args.old_input) if args.old_input else None

    if args.save_input:
        save_doxygen(new_input, args.save_input)

    if args.save_old_input and old_input:
        save_doxygen(old_input, args.save_old_input)

    if args.dump_json:
        if old_input:
            # Use Path.stem/with_name instead of name[0:-len(suffix)]:
            # slicing with -0 produced an empty file name when the dump
            # path had no suffix at all.
            dump_path = args.dump_json
            new_name = dump_path.stem + '.new' + dump_path.suffix
            old_name = dump_path.stem + '.old' + dump_path.suffix
            dump_doxygen_json(new_input, dump_path.with_name(new_name))
            dump_doxygen_json(old_input, dump_path.with_name(old_name))
        else:
            dump_doxygen_json(new_input, args.dump_json)

    level = 0

    if old_input:
        changes = compare(new_input, old_input)
        stats = out_text.generate(changes)
        if args.save_stats:
            args.save_stats.write_text(json.dumps({
                'notice': stats[1],
                'warning': stats[2],
                'critical': stats[3],
            }, indent=2))
        # Exit code is the index of the highest non-zero severity counter.
        for i, count in enumerate(stats):
            if count > 0:
                level = i

    sys.exit(level)


if __name__ == '__main__':
    main()

‎scripts/ci/api_check/headers/nodes.py

+103
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,103 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
6+
class Node:
    """Base class for every API entity extracted from the doxygen XML."""
    id: str
    kind: str = ''
    name: str = ''
    file: str = ''
    line: str = ''
    parent_ids: 'set[str] | None' = None
    children_ids: 'set[str] | None' = None
    desc: str = ''

    def __init__(self, id: str, name: str):
        self.id = id
        self.name = name

    def get_short_id(self):
        """Return a kind-qualified identifier, e.g. ``func:my_function``."""
        return f'{self.kind}:{self.name}'

    def add_parent(self, parent: str):
        """Record a parent id, lazily creating the parent set."""
        if self.parent_ids:
            self.parent_ids.add(parent)
        else:
            self.parent_ids = {parent}

    def add_child(self, child: str):
        """Record a child id, lazily creating the child set."""
        if self.children_ids:
            self.children_ids.add(child)
        else:
            self.children_ids = {child}
28+
29+
30+
class File(Node):
    """A header file that contains API entities."""
    kind: str = 'file'


class Group(Node):
    """A doxygen group (``@defgroup``)."""
    kind: str = 'group'
    # Human-readable group title, as opposed to the short group name.
    title: str = ''


class SimpleNode(Node):
    """Base for entities that carry a single C type string (typedefs, variables, fields)."""
    type: str = ''


class StructField(SimpleNode):
    """A single field of a struct/union; ``index`` is its position within the aggregate."""
    kind: str = 'field'
    index: int = 0
46+
47+
48+
class Struct(Node):
    """A C ``struct`` or ``union`` together with its fields."""
    kind: str
    is_union: bool
    fields: 'list[StructField]'

    def __init__(self, id: str, name: str, is_union: bool):
        super().__init__(id, name)
        self.is_union = is_union
        self.fields = []
        # "kind" distinguishes the two aggregate flavours.
        if is_union:
            self.kind = 'union'
        else:
            self.kind = 'struct'
57+
58+
59+
class Param:
    """One parameter of a function-like entity (plain record, not a Node)."""
    # Position of the parameter in the parameter list, starting from 0.
    index: int
    name: str
    type: str
    desc: str
64+
65+
66+
class FunctionLike(Node):
    """Common base for callable-style entities that carry a parameter list."""
    params: 'list[Param]'

    def __init__(self, id: str, name: str):
        super().__init__(id, name)
        self.params = []

    def add_param(self):
        """Append a fresh ``Param`` (indexed by its position) and return it."""
        new_param = Param()
        new_param.index = len(self.params)
        self.params.append(new_param)
        return new_param
76+
77+
78+
class Function(FunctionLike):
    """A C function."""
    kind: str = 'func'
    return_type: str = 'void'


class Define(FunctionLike):
    """A preprocessor ``#define`` (possibly function-like, hence the params)."""
    kind: str = 'def'
    value: str = ''


class EnumValue(Node):
    """A single enumerator inside an enum."""
    kind: str = 'enum_value'
    value: str


class Enum(Node):
    """A C ``enum`` type."""
    kind: str = 'enum'


class Typedef(SimpleNode):
    """A ``typedef``; the aliased type is in ``type``."""
    kind: str = 'typedef'


class Variable(SimpleNode):
    """A global variable declaration."""
    kind: str = 'var'
103+
+157
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,157 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
from pathlib import Path
6+
from compare import AnyChange, CompareResult
7+
from jinja2 import Template
8+
from args import args
9+
10+
11+
def compile_messages(messages):
    """Compile raw message strings into ``(Template, level)`` pairs.

    The severity level is derived from the message prefix:
    ignore=0, notice=1, warning=2, critical=3.
    Raises ValueError for a message with an unknown prefix.
    """
    prefixes = ('ignore', 'notice', 'warning', 'critical')
    compiled = {}
    for key, message in messages.items():
        for level, prefix in enumerate(prefixes):
            if message.startswith(prefix):
                break
        else:
            raise ValueError(f'Unknown level of message: {message}')
        compiled[key] = (Template(message), level)
    return compiled
27+
28+
29+
# Message catalog keyed by "<kind>-<action>" or "<kind>-<action>-<field>".
# A key equal to the change prefix fires unconditionally; a key with a field
# suffix fires only when that field of the change is truthy. The prefix of
# each message string ("ignore"/"notice"/"warning"/"critical") selects its
# severity level (see compile_messages).
messages: 'dict[str, tuple[Template, int]]' = compile_messages({
    'typedef-added': 'ignore',
    'typedef-deleted': 'critical: Type "{{old.name}}" definition deleted.',
    'typedef-modified-file': 'warning: Type "{{new.name}}" definition moved to a different file.',
    'typedef-modified-desc': 'notice: Type "{{new.name}}" definition description changed.',
    'typedef-modified-type': 'warning: Type "{{new.name}}" definition changed.',
    'var-added': 'ignore',
    'var-deleted': 'critical: Variable "{{old.name}}" deleted.',
    'var-modified-file': 'warning: Variable "{{new.name}}" moved to a different file.',
    'var-modified-desc': 'notice: Variable "{{new.name}}" description changed.',
    'var-modified-type': 'warning: Variable "{{new.name}}" type changed.',
    'enum_value-added': 'ignore',
    'enum_value-deleted': 'critical: Enum value "{{old.name}}" deleted.',
    'enum_value-modified-value': 'warning: Enum value "{{new.name}}" changed.',
    'enum_value-modified-desc': 'notice: Enum value "{{new.name}}" description changed.',
    'enum_value-modified-file': 'warning: Enum value "{{new.name}}" moved to a different file.',
    'enum-added': 'ignore',
    'enum-deleted': 'critical: Enum "{{old.name}}" deleted.',
    'enum-modified-file': 'warning: Enum "{{new.name}}" moved to a different file.',
    'enum-modified-desc': 'notice: Enum "{{new.name}}" description changed.',
    'struct-added': 'ignore',
    'struct-deleted': 'critical: Structure "{{old.name}}" deleted.',
    'struct-modified-file': 'warning: Structure "{{new.name}}" moved to a different file.',
    'struct-modified-desc': 'notice: Structure "{{new.name}}" description changed.',
    'func-added': 'ignore',
    'func-deleted': 'critical: Function "{{old.name}}" deleted.',
    'func-modified-return_type': 'warning: Function "{{new.name}}" return type changed.',
    'func-modified-file': 'warning: Function "{{new.name}}" moved to a different file.',
    'func-modified-desc': 'notice: Function "{{new.name}}" description changed.',
    'def-added': 'ignore',
    'def-deleted': 'critical: Definition "{{old.name}}" deleted.',
    'def-modified-value': 'notice: Definition "{{new.name}}" value changed.',
    'def-modified-file': 'warning: Definition "{{new.name}}" moved to a different file.',
    'def-modified-desc': 'notice: Definition "{{new.name}}" description changed.',
    'field-added': 'ignore',
    'field-deleted': 'critical: Structure "{{struct.new.name}}" field "{{new.name}}" deleted.',
    'field-modified-index': 'ignore',
    'field-modified-type': 'warning: Structure "{{struct.new.name}}" field "{{new.name}}" type changed.',
    'field-modified-desc': 'notice: Structure "{{struct.new.name}}" field "{{new.name}}" description changed.',
    'param-added': 'critical: Parameter "{{new.name}}" added in "{{parent.new.name}}".',
    'param-deleted': 'critical: Parameter "{{old.name}}" deleted from "{{parent.new.name}}".',
    'param-modified-index': 'critical: Parameter "{{new.name}}" reordered in "{{parent.new.name}}".',
    'param-modified-type': 'warning: Parameter "{{new.name}}" type changed in "{{parent.new.name}}".',
    'param-modified-desc': 'notice: Parameter "{{new.name}}" description changed in "{{parent.new.name}}".',
})


# GitHub Actions workflow command names, indexed by severity level.
# Index 0 ("::ignore") is never emitted: level-0 messages are filtered out.
github_commands = [
    '::ignore',
    '::notice',
    '::warning',
    '::error'
]

# Human-readable annotation titles, indexed by the same severity level.
github_titles = [
    'Ignore',
    'Notice',
    'Warning',
    'Critical',
]
89+
90+
def encode(text: str, is_message: bool):
    """Escape *text* for use in a GitHub Actions workflow command.

    Message data needs only ``%``, CR and LF escaped; property values
    (file names, titles) additionally need ``,`` and ``:`` escaped,
    because they delimit properties in the command syntax.
    """
    result = text.replace('%', '%25').replace('\r', '%0D').replace('\n', '%0A')
    if not is_message:
        # Escape every ':' (the original only replaced '::'); a single colon
        # also acts as a delimiter in workflow-command properties.
        result = result.replace(',', '%2C').replace(':', '%3A')
    return result
95+
96+
def show_message(file: Path, line: 'str | int | None', message: str, level: int):
    """Print one diagnostic, either plain text or as a GitHub workflow command."""
    # Optionally re-anchor the path before displaying it.
    if args.resolve_paths is not None:
        file = args.resolve_paths.joinpath(file).absolute()
    if args.relative_to is not None:
        file = file.relative_to(args.relative_to)
    if args.format == 'github':
        props = f'file={encode(str(file), False)}'
        if line is not None:
            props += f',line={line}'
        props += f',title=Compatibility {github_titles[level]}'
        print(f'{github_commands[level]} {props}::{encode(message, True)}')
    else:
        location = f'{file}:{line}' if line is not None else f'{file}'
        print(f'{location}: {message}')
112+
113+
114+
115+
def generate_changes(stats: 'list[int]', changes: 'list[AnyChange]',
                     location: 'tuple[Path, int | None]', **kwargs):
    """Emit a message for each matching change and count them per severity.

    Recurses into struct fields and function/define parameters.

    :param stats: four counters indexed by level (ignore, notice, warning,
        critical), updated in place.
    :param changes: changes to report.
    :param location: fallback (file, line) used when a change carries no
        file information of its own.
    :param kwargs: extra template variables (e.g. ``struct=...``, ``parent=...``).
    """
    for change in changes:
        prefix = f'{change.kind}-{change.action}'
        # Prefer the precise location of the changed entity; fall back to
        # the location inherited from the caller.
        if change.new and hasattr(change.new, 'file') and change.new.file:
            if hasattr(change.new, 'line') and change.new.line:
                loc = (Path(change.new.file), change.new.line)
            else:
                loc = (Path(change.new.file), None)
        else:
            loc = location
        for key, (template, level) in messages.items():
            if key.startswith(prefix) and (level > 0):
                # Expose all public, non-callable attributes of the change
                # (plus caller-provided context) to the template.
                data = {name: getattr(change, name)
                        for name in dir(change)
                        if (not name.startswith('_'))
                        and (not callable(getattr(change, name)))}
                data.update(kwargs)
                message = template.render(**data)
                if key == prefix:
                    show_message(loc[0], loc[1], message, level)
                    stats[level] += 1
                else:
                    # Keys of the form "<prefix>-<field>" fire only when
                    # that field of the change object is truthy.
                    field = key[len(prefix) + 1:]
                    if getattr(change, field):
                        show_message(loc[0], loc[1], message, level)
                        stats[level] += 1
        # Recurse into nested changes. The function returns nothing (the
        # original assigned its None result to a dead variable and carried
        # a wrong "-> int" annotation); counters travel through "stats".
        if prefix == 'struct-modified':
            generate_changes(stats, change.fields, loc, struct=change)
        elif prefix in ('func-modified', 'def-modified'):
            generate_changes(stats, change.params, loc, parent=change)
149+
150+
151+
def generate(compare_result: CompareResult) -> 'list[int]':
    """Print all detected changes, grouped by doxygen group.

    Returns per-level counters: ``[ignore, notice, warning, critical]``.
    """
    stats = [0] * 4
    for group in compare_result.groups:
        if group.name:
            print(f'=== Group {group.name}: {group.title} ===')
        generate_changes(stats, group.changes, (Path(), None))
    return stats

‎scripts/ci/api_check/headers/utils.py

+58
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
import os
6+
import sys
7+
import concurrent.futures
8+
from typing import Callable, Iterable
9+
10+
11+
def warning(*args, **kwargs):
    """Print *args* to stderr with a yellow ``warning:`` prefix."""
    print('\x1B[33mwarning:\x1B[0m', *args, **kwargs, file=sys.stderr)
14+
15+
16+
def error(*args, **kwargs):
    """Print *args* to stderr with a red ``error:`` prefix."""
    print('\x1B[31merror:\x1B[0m', *args, **kwargs, file=sys.stderr)
19+
20+
21+
# Lazily-created executors, cached at module level so that repeated calls
# to concurrent_pool_iter() reuse the same pool.
process_executor = None
thread_executor = None


def concurrent_pool_iter(func: Callable, iterable: Iterable, use_process: bool = False,
                         threshold: int = 2):
    '''Call a function for each item of iterable in a separate thread or process.

    The number of parallel executors is determined by the CPU count.

    @param func        Function to call
    @param iterable    Input iterator
    @param use_process Runs function on separate process when True, thread if False
    @param threshold   If number of elements in iterable is less than threshold,
                       no parallel threads or processes will be started.
    @returns Iterator over tuples containing: return value of func, input element,
             index of that element (starting from 0)
    '''
    # Only the executor caches are module-level state; the worker count is
    # local (it was previously leaked as an undeclared module global).
    global process_executor, thread_executor
    collected = iterable if isinstance(iterable, tuple) else tuple(iterable)
    if len(collected) >= threshold:
        workers = os.cpu_count() or 1  # cpu_count() may return None
        if use_process:
            if process_executor is None:
                process_executor = concurrent.futures.ProcessPoolExecutor(workers)
            executor = process_executor
        else:
            if thread_executor is None:
                thread_executor = concurrent.futures.ThreadPoolExecutor(workers)
            executor = thread_executor
        # Split the work into roughly equal chunks, one per worker.
        chunksize = (len(collected) + workers - 1) // workers
        results = executor.map(func, collected, chunksize=chunksize)
    else:
        results = map(func, collected)
    return zip(results, collected, range(len(collected)))
58+

‎scripts/ci/api_check/pr/__main__.py

+2
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
# Package entry point: lets the directory be executed directly
# (e.g. ``python3 scripts/ci/api_check/pr ...``).
from main import main
main()

‎scripts/ci/api_check/pr/main.py

+122
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,122 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
import os
6+
import re
7+
import sys
8+
import json
9+
from pathlib import Path
10+
from github import Github
11+
from types import SimpleNamespace
12+
from jinja2 import Template
13+
from github.Repository import Repository
14+
from github.PullRequest import PullRequest
15+
from github.IssueComment import IssueComment
16+
from github.WorkflowRun import WorkflowRun
17+
18+
19+
API_CHECK_COMMENT_INDICATOR = '<!-- API-check comment -->'
20+
21+
22+
class TemplateData(SimpleNamespace):
    """Context for the PR-comment template, initialized from a stats JSON file.

    The counters come from the JSON; the GitHub objects are filled in later
    by ``main()``.
    """
    notice: int
    warning: int
    critical: int
    github_actor: str
    repo: Repository
    pr: PullRequest
    run: WorkflowRun

    def __init__(self, file: os.PathLike):
        """Load *file* as JSON and expose its keys as attributes."""
        with open(file, 'r') as fd:
            data = json.load(fd)  # renamed: previously shadowed the built-in "dict"
        super().__init__(**data)
34+
35+
def fatal(*args, **kwargs):
    """Report a fatal error on stderr, then terminate with exit code 1."""
    print(*args, file=sys.stderr, **kwargs)
    sys.exit(1)
38+
39+
def get_stats() -> TemplateData:
    """Load every stats file given on the command line and sum their counters.

    Exits with an error if a file is missing or no files were given.
    """
    total: 'TemplateData | None' = None
    for arg in sys.argv[1:]:
        if not Path(arg).exists():
            fatal(f'The "{arg}" does not exist. Probably checking script failed.')
        current = TemplateData(arg)
        if total is None:
            total = current
        else:
            total.notice += current.notice
            total.warning += current.warning
            total.critical += current.critical
    if total is None:
        fatal('No input files.')
    return total
54+
55+
def get_message(data: TemplateData) -> str:
    """Render the PR comment body, prefixed with the identifying marker."""
    template_file = Path(__file__).parent / 'pr-comment.md.jinja'
    rendered = Template(template_file.read_text()).render(**data.__dict__).strip()
    return API_CHECK_COMMENT_INDICATOR + '\n' + rendered
60+
61+
def get_meta(message, keyword) -> list[str]:
    """Extract values of ``<!-- keyword: value -->`` metadata comments.

    :param message: text to scan (e.g. a rendered PR comment).
    :param keyword: metadata key to look for.
    :return: the values, in order of appearance.
    """
    # re.escape keeps regex metacharacters in the keyword from being
    # interpreted as part of the pattern.
    pattern = r'<!--\s*' + re.escape(keyword) + r':\s*(.*?)\s*-->'
    return [match.group(1) for match in re.finditer(pattern, message, re.DOTALL)]
66+
67+
def main():
    """Post (or update) the API-check summary comment on the pull request.

    Reads stats files given as CLI arguments, renders the comment template,
    creates/edits the PR comment, applies label metadata embedded in the
    rendered message, and exits with the embedded exit code.
    Required environment: GITHUB_TOKEN, GITHUB_ACTOR, GITHUB_REPO,
    PR_NUMBER, GITHUB_RUN_ID.
    """
    data = get_stats()
    print('Stats', data)

    github = Github(os.environ['GITHUB_TOKEN'])
    print(f'Github API connected. Remaining requests {github.rate_limiting[0]} of {github.rate_limiting[1]}.')

    data.github_actor = os.environ['GITHUB_ACTOR']
    print(f'Github user: {data.github_actor}')

    data.repo = github.get_repo(os.environ['GITHUB_REPO'], lazy=True)
    data.pr = data.repo.get_pull(int(os.environ['PR_NUMBER']))
    print(f'Pull request: {data.pr.title} #{data.pr.number} {data.pr.html_url}')

    data.run = data.repo.get_workflow_run(int(os.environ['GITHUB_RUN_ID']))
    print(f'Workflow run: {data.run.id}')

    message = get_message(data)
    print(f'Comment message:\n{message}\n------------------------------------')

    # Find a previously posted API-check comment (recognized by the
    # indicator marker) and update it in place; the for/else adds a new
    # comment only when no marked comment exists.
    comment: 'IssueComment | None'
    for comment in data.pr.get_issue_comments():
        if comment.body.strip().startswith(API_CHECK_COMMENT_INDICATOR):
            if message == comment.body:
                print(f'Comment unchanged: {comment.html_url}')
            else:
                print(f'Editing comment: {comment.html_url}')
                comment.edit(message)
            break
    else:
        print(f'Adding new comment.')
        comment = data.pr.create_issue_comment(message)
        print(f'Added comment: {comment.html_url}')

    # Labels are driven by "<!-- add-label: ... -->" metadata embedded in
    # the rendered message.
    labels = get_meta(message, 'add-label')
    if len(labels) > 0:
        print(f'Adding labels: {", ".join(labels)}')
        data.pr.add_to_labels(*labels)

    # Remove a label only if the PR actually has it, to avoid API errors.
    for label in get_meta(message, 'remove-label'):
        print(f'Removing label: {label}')
        for existing_label in data.pr.labels:
            if existing_label.name == label:
                data.pr.remove_from_labels(label)
                break
        else:
            print(f'Label already removed: {label}')

    # The template decides the exit code via "<!-- exit-code: N -->";
    # the last occurrence wins.
    exit_code = 0
    for value in get_meta(message, 'exit-code'):
        exit_code = int(value)
    sys.exit(exit_code)


if __name__ == '__main__':
    main()
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
{#
2+
# Copyright (c) 2024 Nordic Semiconductor ASA
3+
#
4+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
5+
6+
Jinja template for PR comment with API changes results.
7+
8+
The following variables are available:
9+
notice: int - number of notices detected
10+
warning: int - number of warnings detected
11+
critical: int - number of critical issues detected
12+
github_actor: str - github user name responsible for this workflow
13+
repo: Repository - https://pygithub.readthedocs.io/en/stable/github_objects/Repository.html
14+
pr: PullRequest - https://pygithub.readthedocs.io/en/stable/github_objects/PullRequest.html
15+
run: WorkflowRun - https://pygithub.readthedocs.io/en/stable/github_objects/WorkflowRun.html
16+
17+
You can add the following metadata:
18+
<!-- add-label: XYZ -->
19+
Add label XYZ to the PR.
20+
<!-- remove-label: XYZ -->
21+
Remove label XYZ from the PR.
22+
<!-- exit-code: N -->
23+
Set exit code of the script. Setting exit code different than 0 will cause
24+
an error in workflow and it will block the PR.
25+
#}
26+
27+
{% if critical > 0 %}
28+
29+
<!-- add-label: api-change -->
30+
<!-- add-label: doc-required -->
31+
32+
> [!CAUTION]
33+
> **This PR contains API-breaking changes. Remember to add necessary entry in the migration guide.**
34+
>
35+
36+
&nbsp; | Count | Level | Comment
37+
----------------|------------------|----------|---------
38+
:red_circle: | **{{critical}}** | critical | The modification is a breaking change.
39+
:yellow_circle: | **{{warning}}** | warning | The modification may be a breaking change, but there is not enough context to determine this.
40+
:white_circle: | **{{notice}}** | notice | A modification of the API that probably keeps backward compatibility.
41+
42+
See issue details in the [**job summary**]({{run.html_url}}?pr={{pr.number}}).
43+
44+
{% elif notice + warning > 0 %}
45+
46+
<!-- remove-label: api-change -->
47+
48+
:+1: No critical API-breaking changes detected. You have [{{
49+
(warning|string) + " warnings" if warning > 1 else "1 warning" if warning > 0 else ""
50+
}}{{
51+
" and " if notice > 0 and warning > 0 else ""
52+
}}{{
53+
(notice|string) + " notices" if notice > 1 else "1 notice" if notice > 0 else ""
54+
}}]({{run.html_url}}?pr={{pr.number}}) that you can review.
55+
56+
{% else %}
57+
58+
<!-- remove-label: api-change -->
59+
60+
:+1: No API-breaking changes detected.
61+
62+
{% endif %}
63+
64+
<!-- exit-code: 0 -->
+31
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
#!/bin/bash
# Copyright (c) 2024 Nordic Semiconductor ASA
#
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
#
# Manual test driver for the PR-comment script: fabricates two stats files
# and runs the "pr" package against a real GitHub PR. Fill in the
# placeholder values below before running.

set -e

# Fake GitHub environment the script under test expects.
#GITHUB_TOKEN= from keyboard if commented
GITHUB_ACTOR=...user...
GITHUB_REPO=...user_or_organization/repo_name...
PR_NUMBER=...number...
GITHUB_RUN_ID=...number...

# Recreate the scratch directory with two sample stats JSON files.
rm -Rf /tmp/test-pr-api-check
mkdir -p /tmp/test-pr-api-check
echo '{ "notice": 1, "warning": 3, "critical": 0 }' > /tmp/test-pr-api-check/headers.stats.json
echo '{ "notice": 1, "warning": 0, "critical": 1 }' > /tmp/test-pr-api-check/dts.stats.json

# Absolute directory of this script (the "pr" package to execute).
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )

# Prompt for the token when it is not hard-coded above (-s hides the input).
if [ -z "$GITHUB_TOKEN" ]; then
    read -p "GITHUB_TOKEN: " -s GITHUB_TOKEN
fi

export GITHUB_TOKEN
export GITHUB_ACTOR
export GITHUB_REPO
export PR_NUMBER
export GITHUB_RUN_ID

python3 $SCRIPT_DIR /tmp/test-pr-api-check/headers.stats.json /tmp/test-pr-api-check/dts.stats.json

‎scripts/ci/api_check/requirements.txt

+4
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
doxmlparser ~= 1.10
2+
Jinja2 ~= 3.0
3+
psutil ~= 5.0
4+
PyGithub ~= 2.0
+45
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
# Copyright (c) 2024 Nordic Semiconductor ASA
2+
#
3+
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
4+
5+
import os
6+
import sys
7+
import psutil
8+
from signal import SIGINT
9+
from subprocess import Popen, PIPE, STDOUT
10+
11+
# For example to send Ctrl-C to ninja build when its outputs "syncing doxygen output":
12+
# python interrupt_on.py "syncing doxygen output" ninja nrf
13+
14+
def run_interrupted(match_text: str, command: 'list[str]'):
    """Run *command*, echoing its stdout, and send SIGINT once a line of
    output contains *match_text* (case-insensitive).

    SIGINT is delivered to all child processes first, then to the process
    itself. If the command ends without matching, exits with the command's
    return code on failure, or just warns on success.
    """
    print('Run', command)
    print(' and interrupt on:', match_text)
    match_text = match_text.lower()
    # stderr=None inherits the parent's stderr, so errors remain visible.
    p = Popen(command, stdout=PIPE, stderr=None, encoding="utf-8")
    interrupted = False
    while True:
        line = p.stdout.readline()
        if line:
            print(line, end='')
            if line.lower().find(match_text) >= 0:
                print('Sending SIGINT signal.')
                # Signal the whole process tree: children first, then the
                # parent, so the parent does not respawn work in between.
                parent = psutil.Process(p.pid)
                for child in parent.children(recursive=True):
                    child.send_signal(SIGINT)
                parent.send_signal(SIGINT)
                interrupted = True
        # Keep draining output until the process has actually exited.
        if p.poll() is not None:
            break
    if interrupted:
        print('Correctly interrupted.')
    elif p.returncode:
        # Propagate a genuine failure of the command.
        print(f'Failed with return code {p.returncode}.')
        sys.exit(p.returncode)
    else:
        print('Build not interrupted. You may experience long building time.')
40+
41+
# CLI entry: argv[1] is the text to match, the rest is the command to run.
if len(sys.argv) <= 2:
    print(f'Usage: {sys.argv[0]} "Matching string" command parameters...')
    sys.exit(1)

run_interrupted(sys.argv[1], sys.argv[2:])

0 commit comments

Comments
 (0)
Please sign in to comment.