# wheel.yml
name: Python Wheels

on:
  workflow_dispatch:
  release:
    types: ['released', 'prereleased']

env:
  PACKAGE_VERSION: '1.0.0a20.dev0'
  PACKAGE_NAME: alpaqa

jobs:
  # First we build the wheels natively (build system == host system).
  # This allows us to import the compiled modules, and automatically generate
  # stub files for them. Those stub files are then included in the sdist
  # (source distribution), to be later included in the cross-compiled packages
  # as well (because we can't generate stubs while cross-compiling).
  # By building the native wheels first, we can already start testing while the
  # cross-compiled versions are being built.
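  # (Stub generation only works in this native job: the cross-compiled
  # extension modules target a different architecture and cannot be imported
  # by the build machine's Python interpreter.)
  # Rough local equivalent of this job -- a sketch only, assuming the same
  # scripts/ layout and a conan.profile prepared as in the steps below:
  #   pip install -U build conan ninja 'py-build-cmake~=0.3.1'
  #   conan install . --build=missing -pr:h conan.profile -s build_type=Release
  #   python3 -m build -w -C local="scripts/ci/py-build-cmake.toml"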
  build-sdist:
    name: Build sdist
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        python-version: ['3.13']
    env:
      CCACHE_DIR: ${{ github.workspace }}/.ccache
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      # Ccache
      - name: Install ccache
        run: |
          sudo apt-get update
          sudo apt-get install -y ccache
          mkdir -p "${{ env.CCACHE_DIR }}"
      - name: Cache ccache
        uses: actions/cache@v4
        with:
          path: ${{ env.CCACHE_DIR }}
          key: ${{ runner.os }}-wheel-native-ccache-${{ github.run_id }}
          restore-keys: ${{ runner.os }}-wheel-native-ccache
      - name: Install Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python dependencies
        run: >
          pip install -U
          pip build conan ninja 'py-build-cmake~=0.3.1' 'pybind11-stubgen~=2.5.1' 'numpy<3'
      # Cache the Conan cache folder to speed up installation of the dependencies.
      - name: Cache Conan dependencies
        uses: actions/cache@v4
        with:
          path: ~/.conan2/p
          # Key is unique, to force updating the cache, but we still want the
          # cache to be restored, so we use restore-keys with a matching prefix.
          key: ${{ runner.os }}-build-sdist-${{ github.sha }}
          restore-keys: ${{ runner.os }}-build-sdist-
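      # (restore-keys makes actions/cache fall back to the most recent cache
      # whose key starts with this prefix; the unique key above then saves a
      # refreshed cache when the job finishes.)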
      - name: Prepare Conan configuration
        run: |
          conan profile detect -f
          cat <<- EOF > conan.profile
          include(default)
          include(${{ github.workspace }}/scripts/ci/alpaqa-python.profile)
          [conf]
          tools.cmake.cmaketoolchain:generator=Ninja Multi-Config
          tools.build:skip_test=true
          [buildenv]
          CFLAGS=-fdiagnostics-color
          CXXFLAGS=-fdiagnostics-color
          LDFLAGS=-static-libgcc -static-libstdc++ -flto=auto
          EOF
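      # (-static-libgcc/-static-libstdc++ avoid a runtime dependency on the
      # system's C++ runtime, which helps keep the Linux wheels portable;
      # -flto=auto enables link-time optimization.)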
      - name: Install Conan recipes
        run: |
          recipes="${{ github.workspace }}/tttapa-conan-recipes"
          git clone https://github.com/tttapa/conan-recipes "$recipes"
          conan remote add tttapa-conan-recipes "$recipes" --force
      - name: Install Conan dependencies
        run: |
          for c in Debug Release; do
            conan install . --build=missing -pr:h conan.profile -s build_type=$c
          done
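      # (Dependencies are installed for both build types, presumably because
      # the package builds debug as well as release flavours of the extension
      # modules -- see the python-debug/python-release manifests further down.)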
      - name: Build Wheel package
        run: python3 -m build -w -C local="scripts/ci/py-build-cmake.toml"
        env:
          CMAKE_C_COMPILER_LAUNCHER: ccache
          CMAKE_CXX_COMPILER_LAUNCHER: ccache
      - name: Upload Wheel
        uses: actions/upload-artifact@v4
        with:
          name: native-wheels
          path: dist/*.whl
          retention-days: 1
      - name: Install stubs
        run: |
          # We install the Python modules and stubs in the source directory
          for i in 10 20; do
            py-build-cmake --local="scripts/ci/py-build-cmake.toml" \
              configure --index $i
            py-build-cmake --local="scripts/ci/py-build-cmake.toml" \
              install --index $i --component python_modules -- --prefix python
            py-build-cmake --local="scripts/ci/py-build-cmake.toml" \
              install --index $i --component python_stubs -- --prefix python
          done
          # Then we remove the binary Python modules (sdist is source only)
          while IFS= read -r f || [ -n "$f" ]; do rm -f "$f"
          done < build/python-debug/install_manifest_python_modules.txt
          while IFS= read -r f || [ -n "$f" ]; do rm -f "$f"
          done < build/python-release/install_manifest_python_modules.txt
        env:
          CMAKE_C_COMPILER_LAUNCHER: ccache
          CMAKE_CXX_COMPILER_LAUNCHER: ccache
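      # (The stubs installed with --prefix python above should end up in the
      # sdist created below; this is how the cross-compiled wheels pick them
      # up, as explained at the top of this file.)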
      - name: Create sdist
        run: python3 -m build -s
      - name: Upload sdist
        uses: actions/upload-artifact@v4
        with:
          name: sdist
          path: dist/*.tar.gz
          retention-days: 1

  # Testing is done in the official Python Docker container: https://hub.docker.com/_/python/
  # This matches the environment that users might use more closely.
  # It also ensures that we don't accidentally depend on any libraries specific
  # to the build container.
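  # (The container key below runs all of this job's steps inside the
  # python:3.13-bookworm image on the Ubuntu runner, and the wheel is
  # installed from the downloaded artifact rather than rebuilt from source.)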
  test-linux:
    name: Run tests
    needs: [build-sdist]
    runs-on: ubuntu-latest
    container: python:3.13-bookworm
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Download wheels
        uses: actions/download-artifact@v4
        with:
          name: native-wheels
          path: dist
      - name: Install
        run: python3 -m pip install --find-links=dist "${PACKAGE_NAME}[test]==${PACKAGE_VERSION}"
      - name: Test
        run: pytest -rP

  # After the native build, we have the stub files, and we can start cross-
  # compiling for other architectures.
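  # (The actual cross-compilation logic lives in the local action
  # ./.github/workflows/python-build used below, which receives the host
  # triplet and the Python version from the matrix.)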
  cross-build-linux:
    name: Cross-build wheels for ${{ matrix.host }} - ${{ matrix.python-version }}
    needs: [build-sdist]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        host: [x86_64-bionic-linux-gnu, aarch64-rpi3-linux-gnu]
        python-version:
          - python3.13
          - pypy3.10-v7.3
    env:
      CCACHE_DIR: ${{ github.workspace }}/.ccache
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      # Ccache
      - name: Install ccache
        run: |
          sudo apt-get update
          sudo apt-get install -y ccache
          mkdir -p "${{ env.CCACHE_DIR }}"
      - name: Cache ccache
        uses: actions/cache@v4
        with:
          path: ${{ env.CCACHE_DIR }}
          key: ${{ runner.os }}-wheel-${{ matrix.host }}-${{ matrix.python-version }}-ccache-${{ github.run_id }}
          restore-keys: ${{ runner.os }}-wheel-${{ matrix.host }}-${{ matrix.python-version }}-ccache
      - name: Download sdist
        uses: actions/download-artifact@v4
        with:
          name: sdist
          path: dist
      - name: Extract sdist
        run: mkdir sdist && tar xf dist/*.tar.gz -C sdist --strip-components 1
      - name: Build
        uses: ./.github/workflows/python-build
        with:
          source-dir: sdist
          host: ${{ matrix.host }}
          python-version: ${{ matrix.python-version }}
          ccache: ccache
      - name: Upload package
        uses: actions/upload-artifact@v4
        with:
          name: wheels-${{ matrix.host }}-${{ matrix.python-version }}
          path: ./sdist/dist/*.whl

  # Build for Windows and macOS using cibuildwheel.
  # Since we're not specifying any cross-compilation settings, py-build-cmake
  # will use its default cross-compilation settings for Windows on ARM64.
  # For macOS, we build universal wheels that work on both Intel and ARM macs.
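  # (cibuildwheel builds the wheels from the extracted sdist inside its own
  # build environments; the CIBW_ENABLE: 'pypy' setting below should request
  # PyPy wheels in addition to the default CPython ones.)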
  build-macos-windows:
    name: Build wheels for ${{ matrix.os }}
    needs: [build-sdist]
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [macos-latest, windows-latest]
      fail-fast: false
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Download sdist
        uses: actions/download-artifact@v4
        with:
          name: sdist
          path: dist
      - name: Extract sdist
        shell: bash
        run: |
          mkdir sdist
          tar xf dist/*.tar.gz -C sdist --strip-components 1
          cp -a scripts sdist
      - name: Build wheels
        uses: pypa/cibuildwheel@ee63bf16da6cddfb925f542f2c7b59ad50e93969
        with:
          package-dir: sdist
          output-dir: dist
        env:
          CIBW_ENABLE: 'pypy'
      - name: Upload package
        uses: actions/upload-artifact@v4
        with:
          name: wheels-${{ matrix.os }}
          path: ./dist/*.whl

  # This job checks the package version before release (to make sure that the
  # package version actually matches the version of the GitHub release tag),
  # and uses Twine to check the metadata of the packages.
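  # (Concretely: the built wheel is installed from dist/ and the version
  # reported by importlib.metadata is compared against the release tag name,
  # so a stale PACKAGE_VERSION fails this job before anything is published.)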
  check-release:
    if: ${{ github.event.action == 'released' || github.event.action == 'prereleased' }}
    needs: [build-sdist, test-linux, build-macos-windows]
    runs-on: ubuntu-latest
    container: python:3.12-bullseye
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          pattern: wheels-*
          path: dist
          merge-multiple: true
      - name: Install package
        run: python -m pip install --no-deps --no-index --find-links=dist ${PACKAGE_NAME}==${PACKAGE_VERSION}
      - name: Check package version
        run: |
          [ "${{ github.event.release.tag_name }}" == $(python -c 'from importlib.metadata import version as v; print(v("${{ env.PACKAGE_NAME }}"))') ]
        shell: bash
      - name: Twine check
        run: |
          python -m pip install twine
          twine check dist/*

  # Here we download the sdist and the built Wheel files, and upload them to
  # TestPyPI. You should follow the trusted publishing instructions in the
  # https://github.com/pypa/gh-action-pypi-publish README and on
  # https://docs.pypi.org/trusted-publishers carefully!
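  # (The testpypi environment together with the id-token: write permission
  # lets pypa/gh-action-pypi-publish exchange a GitHub OIDC token for a
  # short-lived PyPI token, so no long-lived API token needs to be stored as
  # a repository secret.)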
  release:
    needs: [check-release]
    if: ${{ github.event.action == 'released' || github.event.action == 'prereleased' }}
    runs-on: ubuntu-latest
    environment:
      name: testpypi
      url: https://pypi.org/p/alpaqa
    permissions:
      id-token: write # mandatory for trusted publishing
    steps:
      - uses: actions/download-artifact@v4
        with:
          pattern: wheels-*
          path: dist
          merge-multiple: true
      - uses: actions/download-artifact@v4
        with:
          name: sdist
          path: dist
      - name: Publish package distributions to PyPI
        uses: pypa/gh-action-pypi-publish@15c56dba361d8335944d31a2ecd17d700fc7bcbc