Commit 2be339c

scripts: ci: Detect API-breaking changes in the PRs
This script will check the PR's changes in the header files and DTS bindings that may affect compatibility of the public API. This will reduce the risk of accidentally breaking the API. A workflow that runs on each PR will add a comment with an analysis summary.

Signed-off-by: Dominik Kilian <Dominik.Kilian@nordicsemi.no>
1 parent cd66e53 commit 2be339c

22 files changed: +2278 -0 lines changed

.github/workflows/api-check.yml

+129
@@ -0,0 +1,129 @@
name: API Check

on:
  pull_request:
    branches:
      - main
  workflow_dispatch:
    inputs:
      new_commit:
        type: string
        required: true
        description: New Commit
      old_commit:
        type: string
        required: true
        description: Old Commit

jobs:
  build:
    runs-on: ubuntu-latest
    concurrency:
      group: ${{ github.workflow }}-${{ github.ref }}
      cancel-in-progress: true
    steps:
      - name: Checkout sources
        uses: nordicbuilder/action-checkout-west-update@main
        with:
          git-fetch-depth: 0
          west-update-args: ''

      - name: cache-pip
        uses: actions/cache@v3
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-doc-pip

      - name: Git rebase
        if: github.event_name == 'pull_request'
        env:
          BASE_REF: ${{ github.base_ref }}
        working-directory: ncs/nrf
        run: |
          git remote -v
          git branch
          git rebase origin/${BASE_REF}
          # debug
          git log --pretty=oneline -n 5

      - name: Install packages
        run: |
          sudo apt update
          sudo apt-get install -y ninja-build mscgen plantuml
          sudo snap install yq
          DOXYGEN_VERSION=$(yq ".doxygen.version" ./ncs/nrf/scripts/tools-versions-linux.yml)
          wget --no-verbose "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN_VERSION//./_}/doxygen-${DOXYGEN_VERSION}.linux.bin.tar.gz"
          tar xf doxygen-${DOXYGEN_VERSION}.linux.bin.tar.gz
          echo "${PWD}/doxygen-${DOXYGEN_VERSION}/bin" >> $GITHUB_PATH
          cp -r ncs/nrf/scripts/ci/api_check .

      - name: Install Python dependencies
        working-directory: ncs
        run: |
          sudo pip3 install -U setuptools wheel pip
          pip3 install -r nrf/doc/requirements.txt
          pip3 install -r ../api_check/requirements.txt

      - name: West zephyr-export
        working-directory: ncs
        run: |
          west zephyr-export

      - name: Checkout new commit and west update
        if: github.event_name == 'workflow_dispatch'
        working-directory: ncs/nrf
        run: |
          git checkout ${{ github.event.inputs.new_commit }}
          west update

      - name: Collect data from new commit
        working-directory: ncs/nrf
        run: |
          source ../zephyr/zephyr-env.sh
          echo =========== NEW COMMIT ===========
          git log -n 1
          cmake -GNinja -Bdoc/_build -Sdoc
          python3 ../../api_check/utils/interrupt_on.py "syncing doxygen output" ninja -C doc/_build nrf
          python3 ../../api_check/headers doc/_build/nrf/doxygen/xml --save-input ../../headers-new.pkl
          python3 ../../api_check/dts -n - --save-input ../../dts-new.pkl
          rm -Rf doc/_build

      - name: Checkout old commit and west update
        working-directory: ncs/nrf
        run: |
          git checkout ${{ github.event.inputs.old_commit }}${{ github.base_ref }}
          cd ..
          west update

      - name: Collect data from old commit
        working-directory: ncs/nrf
        run: |
          source ../zephyr/zephyr-env.sh
          echo =========== OLD COMMIT ===========
          git log -n 1
          cmake -GNinja -Bdoc/_build -Sdoc
          python3 ../../api_check/utils/interrupt_on.py "syncing doxygen output" ninja -C doc/_build nrf
          python3 ../../api_check/headers doc/_build/nrf/doxygen/xml --save-input ../../headers-old.pkl
          python3 ../../api_check/dts -n - --save-input ../../dts-old.pkl

      - name: Check
        working-directory: ncs/nrf
        run: |
          python3 ../../api_check/headers --format github --resolve-paths . --relative-to . --save-stats ../../headers-stats.json ../../headers-new.pkl ../../headers-old.pkl || true
          python3 ../../api_check/dts --format github --relative-to . --save-stats ../../dts-stats.json -n ../../dts-new.pkl -o ../../dts-old.pkl || true
          echo Headers stats
          cat ../../headers-stats.json || true
          echo DTS stats
          cat ../../dts-stats.json || true

      - name: Update PR
        if: github.event_name == 'pull_request'
        working-directory: ncs/nrf
        env:
          PR_NUMBER: ${{ github.event.number }}
          GITHUB_ACTOR: ${{ github.actor }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GITHUB_REPO: ${{ github.repository }}
          GITHUB_RUN_ID: ${{ github.run_id }}
        run: |
          python3 ../../api_check/pr ../../headers-stats.json ../../dts-stats.json
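
Both "Collect data" steps wrap the documentation build in api_check/utils/interrupt_on.py, whose source is not part of this excerpt. A minimal sketch of what such a wrapper could look like, written here purely as an assumption (echo the command's output and stop it once the marker string appears, so the job does not wait for the full documentation build once the Doxygen XML is available):

# Hypothetical sketch of utils/interrupt_on.py; the real file is part of this
# commit but is not shown in this excerpt.
import subprocess
import sys

def main() -> int:
    marker, command = sys.argv[1], sys.argv[2:]
    proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, text=True)
    for line in proc.stdout:
        print(line, end='', flush=True)
        if marker in line:
            # Assumption: once the marker is printed, the needed artifacts exist.
            proc.terminate()
            break
    proc.wait()
    return 0

if __name__ == '__main__':
    sys.exit(main())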

scripts/ci/api_check/dts/__main__.py

+2
@@ -0,0 +1,2 @@
from main import main
main()

scripts/ci/api_check/dts/args.py

+61
@@ -0,0 +1,61 @@
# Copyright (c) 2024 Nordic Semiconductor ASA
#
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause

import sys
import argparse
from pathlib import Path


class ArgsClass:
    new: 'list[list[str]]'
    old: 'list[list[str]]|None'
    format: str
    relative_to: 'Path | None'
    save_stats: 'Path | None'
    save_input: 'Path | None'
    save_old_input: 'Path | None'
    dump_json: 'Path | None'


def parse_args() -> ArgsClass:
    parser = argparse.ArgumentParser(add_help=False, allow_abbrev=False,
                                     description='Detect DTS binding changes.')
    parser.add_argument('-n', '--new', nargs='+', action='append', required=True,
                        help='List of directories where to search the new DTS binding. ' +
                             'The "-" will use the "ZEPHYR_BASE" environment variable to find ' +
                             'DTS binding in default directories.')
    parser.add_argument('-o', '--old', nargs='+', action='append',
                        help='List of directories where to search the old DTS binding. ' +
                             'The "-" will use the "ZEPHYR_BASE" environment variable to find ' +
                             'DTS binding in default directories. You should skip this if you ' +
                             'want to pre-parse the input with the "--save-input" option.')
    parser.add_argument('--format', choices=('text', 'github'), default='text',
                        help='Output format. Default is "text".')
    parser.add_argument('--relative-to', type=Path,
                        help='Show relative paths in messages.')
    parser.add_argument('--save-stats', type=Path,
                        help='Save statistics to JSON file.')
    parser.add_argument('--save-input', metavar='FILE', type=Path,
                        help='Pre-parse and save the new input to a file. The file format may change ' +
                             'from version to version. Use always the same version ' +
                             'of this tool for one file.')
    parser.add_argument('--save-old-input', metavar='FILE', type=Path,
                        help='Pre-parse and save the old input to a file.')
    parser.add_argument('--dump-json', metavar='FILE', type=Path,
                        help='Dump input data to a JSON file (only for debug purposes).')
    parser.add_argument('--help', action='help',
                        help='Show this help and exit.')
    args: ArgsClass = parser.parse_args()

    if (args.old is None) and (args.save_input is None):
        parser.print_usage()
        print('error: at least one of the following arguments is required: old-input, --save-input', file=sys.stderr)
        sys.exit(2)

    args.relative_to = args.relative_to.absolute() if args.relative_to else None

    return args


args: ArgsClass = parse_args()
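
The parsed options are exposed as a module-level args singleton (the parse_args() call at import time above), so other modules of the tool can simply import it instead of passing an argument object around. A minimal sketch of a consumer, with the function name and messages invented here for illustration only:

# Hypothetical consumer of the module-level singleton above
# (illustrative only, not part of this commit).
from args import args

def report(message: str) -> None:
    # Emit either plain text or a GitHub Actions annotation, depending on --format.
    if args.format == 'github':
        print(f'::warning::{message}')
    else:
        print(message)
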
+109
@@ -0,0 +1,109 @@
# Copyright (c) 2024 Nordic Semiconductor ASA
#
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause

import sys
import pickle
from pathlib import Path
from dts_tools import devicetree_sources, warning

if devicetree_sources:
    sys.path.insert(0, devicetree_sources)

from devicetree import edtlib


class ParseResult:
    bindings: 'list[Binding]'
    binding_by_name: 'dict[str, Binding]'
    def __init__(self):
        self.bindings = []
        self.binding_by_name = {}

class Property:
    name: str
    type: str
    description: str
    enum: 'set[str]'
    const: 'str | None'
    default: 'str | None'
    deprecated: bool
    required: bool
    specifier_space: str

    def __init__(self, prop: edtlib.PropertySpec):
        self.name = prop.name
        self.type = prop.type or ''
        self.description = prop.description or ''
        self.enum = { str(x) for x in (prop.enum or []) }
        self.const = str(prop.const) if prop.const else None
        self.default = str(prop.default) if prop.default else None
        self.deprecated = prop.deprecated or False
        self.required = prop.required or False
        self.specifier_space = str(prop.specifier_space or '')

class Binding:
    path: str
    name: str
    description: str
    cells: str
    buses: str
    properties: 'dict[str, Property]'

    def __init__(self, binding: edtlib.Binding, file: Path):
        self.path = str(file)
        self.name = binding.compatible or self.path
        if binding.on_bus is not None:
            self.name += '@' + binding.on_bus
        self.description = binding.description or ''
        cells_array = [
            f'{name}={";".join(value)}' for name, value in (binding.specifier2cells or {}).items()
        ]
        cells_array.sort()
        self.cells = '&'.join(cells_array)
        busses_array = list(binding.buses or [])
        busses_array.sort()
        self.buses = ';'.join(busses_array)
        self.properties = {}
        for key, value in (binding.prop2specs or {}).items():
            prop = Property(value)
            self.properties[key] = prop


def get_binding_files(bindings_dirs: 'list[Path]') -> 'list[Path]':
    binding_files = []
    for bindings_dir in bindings_dirs:
        if not bindings_dir.is_dir():
            raise FileNotFoundError(f'Bindings directory "{bindings_dir}" not found.')
        for file in bindings_dir.glob('**/*.yaml'):
            binding_files.append(file)
        for file in bindings_dir.glob('**/*.yml'):
            binding_files.append(file)
    return binding_files


def parse_bindings(dirs_or_pickle: 'list[Path]|Path') -> ParseResult:
    result = ParseResult()
    if isinstance(dirs_or_pickle, list):
        yaml_files = get_binding_files(dirs_or_pickle)
        fname2path: 'dict[str, str]' = {
            path.name: str(path) for path in yaml_files
        }
        for binding_file in yaml_files:
            try:
                binding = Binding(edtlib.Binding(str(binding_file), fname2path, None, False, False), binding_file)
                if binding.name in result.binding_by_name:
                    warning(f'Repeating binding {binding.name}: {binding.path} {result.binding_by_name[binding.name].path}')
                result.bindings.append(binding)
                result.binding_by_name[binding.name] = binding
            except edtlib.EDTError as err:
                warning(err)
    else:
        with open(dirs_or_pickle, 'rb') as fd:
            result = pickle.load(fd)
    return result


def save_bindings(parse_result: ParseResult, file: Path):
    with open(file, 'wb') as fd:
        pickle.dump(parse_result, fd)
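
The comparison of the two snapshots is implemented in other files of this commit that are not shown in this excerpt. Purely as an illustration of how two ParseResult objects produced by parse_bindings() above could be diffed, a minimal sketch (the function name and message format are assumptions, not taken from the commit):

def diff_bindings(new: ParseResult, old: ParseResult) -> 'list[str]':
    # Hypothetical sketch only; not the comparison logic shipped in this commit.
    messages = []
    for name, old_binding in old.binding_by_name.items():
        new_binding = new.binding_by_name.get(name)
        if new_binding is None:
            messages.append(f'binding removed: {name}')
            continue
        for prop_name, old_prop in old_binding.properties.items():
            new_prop = new_binding.properties.get(prop_name)
            if new_prop is None:
                messages.append(f'property removed: {name}/{prop_name}')
            elif (new_prop.type, new_prop.required) != (old_prop.type, old_prop.required):
                messages.append(f'property changed: {name}/{prop_name}')
    for name in new.binding_by_name.keys() - old.binding_by_name.keys():
        messages.append(f'binding added: {name}')
    return messages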
