updates to the build process and binary bundles

All platforms:

- rename scripts/ to tools/: Bazelisk expects to find its wrapper script
(used by the Mac changes below) in tools/. Rather than maintain separate
scripts/ and tools/ folders, it's simpler to move everything into tools/.
- wheel outputs and binary bundles now go into .bazel/out/dist. While they
are not technically Bazel build products, placing them there ensures they
get cleaned up when 'bazel clean' is run, and keeps them out of the source
folder (see the sketch after this list).
- update to the latest Bazel
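
The idea, as a minimal sketch only (this is not the project's actual tooling;
the source and destination paths are assumptions based on the
`--symlink_prefix=.bazel/` setting further below):

```python
# Sketch only: copy freshly built wheels under Bazel's .bazel/ symlink prefix,
# so `bazel clean` removes them and they never clutter the source tree.
import shutil
from pathlib import Path

workspace = Path(".")  # assumption: run from the repository root
dist = workspace / ".bazel" / "out" / "dist"
dist.mkdir(parents=True, exist_ok=True)

# hypothetical location of the built wheels
for wheel in (workspace / ".bazel" / "bin").rglob("*.whl"):
    shutil.copy2(wheel, dist)
print(f"copied wheels into {dist}")
```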

Windows changes:

- bazel.bat has been removed, and tools\setup-env.bat has been added.
Other scripts like .\run.bat will automatically call it to set up the
environment.
- because Bazel is now on the path, you can 'bazel test ...' from any
folder, instead of having to run \anki\bazel.
- the bat files can handle being called from any working directory,
so things like running "\anki\tools\python" from c:\ will work.
- build installer as part of bundling process

Mac changes:

- `arch -arch x86_64 bazel ...` will now automatically use a different
build root, so that it is cheap to switch back and forth between archs
on a new Mac (see the sketch after this list).
- tools/run-qt* will now automatically use Rosetta
- disable jemalloc in Mac x86 build for now, as it won't build under
Rosetta (perhaps due to its build scripts using $host_cpu instead of
$target_cpu)
- create app bundle as part of bundling process
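
The wrapper mechanics aren't spelled out above; as a rough sketch only (not
the actual tools/ wrapper, and the cache location is invented), a wrapper can
key the output root off the reported architecture before handing off to Bazel:

```python
#!/usr/bin/env python3
# Sketch of a per-arch Bazel wrapper: under `arch -arch x86_64 ...` a
# translated process reports x86_64, so a machine-specific output root keeps
# the native arm64 and Rosetta x86_64 build caches separate.
import os
import platform
import subprocess
import sys

machine = platform.machine()  # "arm64" natively, "x86_64" under Rosetta
# hypothetical cache location; the real wrapper may use something else
output_root = os.path.expanduser(f"~/.cache/anki-bazel-{machine}")

cmd = ["bazel", f"--output_user_root={output_root}", *sys.argv[1:]]
sys.exit(subprocess.run(cmd).returncode)
```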

Linux changes:

- remove the arm64 orjson workaround in the Linux bundle: without a
readily-available, relatively distro-agnostic PyQt/Qt build we can use,
the arm64 Linux bundle is of very limited usefulness.
- update Docker files for release build
- include fcitx5 in both the qt5 and qt6 bundles
- create tarballs as part of the bundling process
Author: Damien Elmes, 2022-01-30 11:50:14 +11:00
parent d55f080733
commit 95dbf30fb9
123 changed files with 4062 additions and 601 deletions


@ -1 +1,2 @@
node_modules
.bazel


@ -8,7 +8,7 @@ build --enable_runfiles
build:windows --build_python_zip=false
# record version/build hash
build --workspace_status_command='bash ./scripts/status.sh'
build --workspace_status_command='bash ./tools/status.sh'
# run clippy when compiling rust in test mode
test --aspects=@rules_rust//rust:defs.bzl%rust_clippy_aspect --output_groups=+clippy_checks
@ -31,7 +31,9 @@ build:windows --worker_quit_after_build
# place convenience symlinks inside a single folder for easier exclusion in IDEs
build --symlink_prefix=.bazel/
build --experimental_no_product_name_out_symlink
# if (auto-created) windows.bazelrc exists, import it
try-import %workspace%/windows.bazelrc
# allow extra user customizations in a separate file
# (see .user.bazelrc for an example)


@ -1 +1 @@
4.2.1
5.0.0


@ -1,13 +1,17 @@
FROM debian:10-slim
FROM python:3.9-slim-buster
ARG DEBIAN_FRONTEND="noninteractive"
ARG uid=1000
ARG gid=1000
RUN apt-get update \
RUN useradd -d /state -m -u 998 user
RUN apt-get update && apt install --yes gnupg ca-certificates && \
apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 32A37959C2FA5C3C99EFBC32A79206696452D198 \
&& echo "deb https://apt.buildkite.com/buildkite-agent stable main" > /etc/apt/sources.list.d/buildkite-agent.list \
&& apt-get update \
&& apt-get install --yes --no-install-recommends \
autoconf \
bash \
buildkite-agent \
ca-certificates \
curl \
findutils \
@ -23,6 +27,7 @@ RUN apt-get update \
libgstreamer-plugins-base1.0 \
libgstreamer1.0-0 \
libnss3 \
libpulse-mainloop-glib0 \
libpulse-mainloop-glib0 \
libssl-dev \
libxcomposite1 \
@ -37,23 +42,23 @@ RUN apt-get update \
make \
pkg-config \
portaudio19-dev \
python3-dev \
rsync \
zstd \
&& rm -rf /var/lib/apt/lists/*
RUN curl -L https://github.com/bazelbuild/bazelisk/releases/download/v1.10.1/bazelisk-linux-amd64 \
RUN curl -L https://github.com/bazelbuild/bazelisk/releases/download/v1.7.4/bazelisk-linux-amd64 \
-o /usr/local/bin/bazel \
&& chmod +x /usr/local/bin/bazel
RUN mkdir -p /code/bazel-docker/home && \
echo groupadd -g ${gid} user && \
useradd -d /code/bazel-docker/home -m -u ${uid} user && \
chown -R user.user /code
RUN ln -sf /usr/bin/python3 /usr/bin/python
USER user
COPY build-entrypoint /tmp
WORKDIR /code
ENV XDG_CACHE_HOME=/code/bazel-docker/home
RUN mkdir -p /etc/buildkite-agent/hooks && chown -R user /etc/buildkite-agent
ENTRYPOINT ["/bin/bash", "/tmp/build-entrypoint"]
COPY buildkite.cfg /etc/buildkite-agent/buildkite-agent.cfg
COPY environment /etc/buildkite-agent/hooks/environment
USER user
WORKDIR /code/buildkite
ENTRYPOINT ["/usr/bin/buildkite-agent", "start"]


@ -1,13 +1,18 @@
FROM debian:11-slim
ARG DEBIAN_FRONTEND="noninteractive"
ARG uid=1000
ARG gid=1000
ENV PYTHON_SITE_PACKAGES=/usr/lib/python3/dist-packages/
RUN apt-get update \
RUN useradd -d /state -m -u 998 user
RUN apt-get update && apt install --yes gnupg ca-certificates && \
apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 32A37959C2FA5C3C99EFBC32A79206696452D198 \
&& echo "deb https://apt.buildkite.com/buildkite-agent stable main" > /etc/apt/sources.list.d/buildkite-agent.list \
&& apt-get update \
&& apt-get install --yes --no-install-recommends \
autoconf \
bash \
buildkite-agent \
ca-certificates \
curl \
findutils \
@ -23,6 +28,7 @@ RUN apt-get update \
libgstreamer-plugins-base1.0 \
libgstreamer1.0-0 \
libnss3 \
libpulse-mainloop-glib0 \
libpulse-mainloop-glib0 \
libssl-dev \
libxcomposite1 \
@ -37,12 +43,13 @@ RUN apt-get update \
make \
pkg-config \
portaudio19-dev \
python3-dev \
rsync \
# -- begin only required for arm64/debian11
clang-format \
python-is-python3 \
python3-pyqt5.qtwebengine \
# -- end only required for arm64/debian11
# -- end only required for arm64/debian11
&& rm -rf /var/lib/apt/lists/*
@ -50,12 +57,13 @@ RUN curl -L https://github.com/bazelbuild/bazelisk/releases/download/v1.10.1/baz
-o /usr/local/bin/bazel \
&& chmod +x /usr/local/bin/bazel
RUN echo groupadd -g ${gid} user && useradd -d /code/bazel-docker/home -m -u ${uid} user
RUN ln -sf /usr/bin/python3 /usr/bin/python
RUN mkdir -p /etc/buildkite-agent/hooks && chown -R user /etc/buildkite-agent
COPY buildkite.cfg /etc/buildkite-agent/buildkite-agent.cfg
COPY environment /etc/buildkite-agent/hooks/environment
USER user
COPY build-entrypoint /tmp
WORKDIR /code
ENV XDG_CACHE_HOME=/code/bazel-docker/home
ENV PYTHON_SITE_PACKAGES=/usr/lib/python3/dist-packages/
ENTRYPOINT ["/bin/bash", "/tmp/build-entrypoint"]
WORKDIR /code/buildkite
ENTRYPOINT ["/usr/bin/buildkite-agent", "start"]


@ -0,0 +1,6 @@
name="lin-ci"
tags="queue=lin-ci"
build-path="/state/build"
hooks-path="/etc/buildkite-agent/hooks"
no-plugins=true
no-local-hooks=true


@ -0,0 +1,7 @@
#!/bin/bash
if [[ "${BUILDKITE_COMMAND}" != ".buildkite/linux/entrypoint" &&
"${BUILDKITE_COMMAND}" != ".buildkite/linux/release-entrypoint" ]]; then
echo "Command not allowed: ${BUILDKITE_COMMAND}"
exit 1
fi

.buildkite/linux/docker/run.sh (new executable file, 30 lines)

@ -0,0 +1,30 @@
#!/bin/bash
# use './run.sh serve' to daemonize
set -e
if [ "$1" = "serve" ]; then
extra_args="-d --restart always"
else
extra_args="-it"
fi
if [ $(uname -m) = "aarch64" ]; then
arch=arm64
else
arch=amd64
fi
DOCKER_BUILDKIT=1 docker build -f Dockerfile.${arch} --tag linci .
if docker container inspect linci > /dev/null 2>&1; then
docker stop linci || true
docker container rm linci
fi
docker run $extra_args \
--name linci \
-v ci-state:/state \
-e BUILDKITE_AGENT_TOKEN \
-e BUILDKITE_AGENT_TAGS \
linci


@ -17,7 +17,7 @@ test -e /state/node_modules && mv /state/node_modules .
$BAZEL test $BUILDARGS ... //rslib/linkchecker
echo "--- Running lints"
python scripts/copyright_headers.py
python tools/copyright_headers.py
echo "--- Cleanup"
# if tests succeed, back up node_modules folder


@ -0,0 +1,18 @@
#!/bin/bash
set -e
# move existing node_modules into tree
test -e /state/node_modules && mv /state/node_modules .
if [ $(uname -m) = "aarch64" ]; then
./tools/build
else
./tools/bundle
fi
rm -rf /state/dist
mv .bazel/out/dist /state
# if tests succeed, back up node_modules folder
mv node_modules /state/


@ -15,7 +15,7 @@ test -e $STATE/node_modules && mv $STATE/node_modules .
$BAZEL test $BUILDARGS ...
echo "--- Building wheels"
$BAZEL build dist
$BAZEL build wheels
# if tests succeed, back up node_modules folder
mv node_modules $STATE/

.gitignore

@ -1,11 +1,10 @@
__pycache__
.DS_Store
/bazel-*
anki.prof
target
user.bazelrc
/user.bazelrc
.dmypy.json
rust-project.json
node_modules
.idea/
.bazel
/.idea/
/.bazel
/windows.bazelrc


@ -4,19 +4,21 @@
"**/.git/objects/**": true,
"**/.git/subtree-cache/**": true,
"**/node_modules/*/**": true,
".bazel/**": true
".bazel/**": true,
"dist/**": true
},
"python.analysis.extraPaths": ["./pylib"],
"python.formatting.provider": "black",
"rust-analyzer.cargo.runBuildScripts": true,
"rust-analyzer.checkOnSave.allTargets": false,
"rust-analyzer.files.excludeDirs": [".bazel", "node_modules"],
"rust-analyzer.files.excludeDirs": [".bazel", "node_modules", "dist"],
"rust-analyzer.procMacro.enable": true,
// this formats 'use' blocks in a nicer way, but requires you to run
// 'rustup install nightly'.
"rust-analyzer.rustfmt.extraArgs": ["+nightly"],
"search.exclude": {
"**/node_modules": true,
".bazel/**": true
".bazel/**": true,
"dist/**": true
}
}


@ -12,24 +12,23 @@ genrule(
srcs = ["//:defs.bzl"],
outs = ["buildinfo.txt"],
cmd = select({
"release": "$(location //scripts:buildinfo) $(location //:defs.bzl) bazel-out/stable-status.txt release > $@",
"//conditions:default": "$(location //scripts:buildinfo) $(location //:defs.bzl) bazel-out/stable-status.txt devel > $@",
"release": "$(location //tools:buildinfo) $(location //:defs.bzl) bazel-out/stable-status.txt release > $@",
"//conditions:default": "$(location //tools:buildinfo) $(location //:defs.bzl) bazel-out/stable-status.txt devel > $@",
}),
stamp = 1,
tools = [
"//scripts:buildinfo",
"//tools:buildinfo",
],
visibility = ["//visibility:public"],
)
pkg_tar(
name = "dist",
name = "wheels",
srcs = [
"//pylib/anki:wheel",
"//qt/aqt:wheel",
],
mode = "0644",
package_dir = "bazel-dist",
tags = ["manual"],
)


@ -6,7 +6,7 @@ license = "AGPL-3.0-or-later"
[workspace]
members = ["rslib", "rslib/i18n", "rslib/i18n_helpers", "rslib/linkchecker", "pylib/rsbridge"]
exclude = ["qt/package"]
exclude = ["qt/bundle"]
[lib]
# dummy top level for tooling


@ -1,2 +0,0 @@
@set PATH=c:\msys64\usr\bin;c:\python;%PATH%
\bazel\bazel --output_user_root=\bazel\anki %*


@ -1,7 +1,7 @@
This folder integrates Rust crates.io fetching into Bazel.
To add or update dependencies, ensure a local Rust environment is available
(eg `source scripts/cargo-env`), then install cargo-raze:
(eg `source tools/cargo-env`), then install cargo-raze:
```
cargo install cargo-raze --version 0.14.1


@ -56,13 +56,13 @@ Run the following command to create Python packages:
On Mac/Linux:
```
./scripts/build
./tools/build
```
On Windows:
```
.\scripts\build.bat
.\tools\build.bat
```
The generated wheel paths will be printed as the build completes.
@ -71,7 +71,7 @@ Follow the steps [on the beta site](https://betas.ankiweb.net/#via-pypipip), but
`pip install --upgrade --pre aqt[qt6]` line with something like:
```
pyenv/bin/pip install --upgrade bazel-dist/*.whl
pyenv/bin/pip install --upgrade dist/*.whl
```
(On Windows you'll need to list out the filenames manually instead of using a wildcard).
@ -94,36 +94,14 @@ The build process will download about a gigabyte of dependencies, and produce
about 6 gigabytes of temporary files. Once you've created the wheels, you can
remove the other files to free up space if you wish.
- `bazel clean --expunge` will remove the generated files, freeing up most
of the space. The files are usualy stored in a subdir of ~/.cache/bazel/
- `rm -rf ~/.cache/bazel*` will remove the cached downloads as well, requiring
them to be redownloaded if you want to build again.
- `bazel clean --expunge` will remove the generated Bazel files, freeing up
most of the space. The files are usually stored in a subdir of
`~/.cache/bazel` or `\bazel\anki`
- `rm -rf ~/.cache/bazel*` or `\bazel\anki` will remove cached downloads as
well, requiring them to be redownloaded if you want to build again.
- `rm -rf ~/.cache/{yarn,pip}` will remove the shared pip and yarn caches that
other apps may be using as well.
## Building with Docker
Linux users can build using the instructions above, or they can optionally [build
via Docker](../scripts/docker/README.md).
On Linux, the generated Anki wheel will have a filename like:
anki-2.1.49-cp39-abi3-manylinux_2_31_aarch64.whl
The 2_31 part means that the wheel requires glibc 2.31 or later. If you have
built the wheel on a machine with an older glibc version, you will get an error
if you try to install the wheel:
ERROR: No matching distribution found for anki
To avoid the error, you can rename the .whl file to match your glibc version.
If you still get the error, another possibility is that you are trying to
install with an old version of Python - 3.9 or later is required.
On ARM Linux, please see the instructions in the pre-built wheels section about
a system PyQt, and the notes at the bottom of [Linux](./linux.md).
## Running tests
You can run all tests at once. From the top level project folder:
@ -151,7 +129,7 @@ On Mac/Linux, after installing 'fswatch', you can run mypy on
each file save automatically with:
```
./scripts/mypy-watch
./tools/mypy-watch
```
## Fixing formatting
@ -233,6 +211,12 @@ in the collection2.log file will also be printed on stdout.
If ANKI_PROFILE_CODE is set, Python profiling data will be written on exit.
# Binary Bundles
Anki's official binary packages are created with `tools/bundle`. The script was created specifically
for the official builds, and is provided as-is; we are unfortunately not able to provide assistance with
any issues you may run into when using it.
## Mixing development and study
You may wish to create a separate profile with File>Switch Profile for use


@ -17,12 +17,12 @@ RUN curl -fsSL https://github.com/bazelbuild/bazelisk/releases/download/v1.7.4/b
WORKDIR /opt/anki
COPY . .
# Build python wheels.
RUN ./scripts/build
RUN ./tools/build
# Install pre-compiled Anki.
FROM python:${PYTHON_VERSION}-slim as installer
WORKDIR /opt/anki/
COPY --from=build /opt/anki/bazel-dist/ wheels/
COPY --from=build /opt/anki/wheels/ wheels/
# Use virtual environment.
RUN python -m venv venv \
&& ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \
@ -35,29 +35,29 @@ ENV PATH=/opt/anki/venv/bin:$PATH
# Install run-time dependencies.
RUN apt-get update \
&& apt-get install --yes --no-install-recommends \
libasound2 \
libdbus-1-3 \
libfontconfig1 \
libfreetype6 \
libgl1 \
libglib2.0-0 \
libnss3 \
libxcb-icccm4 \
libxcb-image0 \
libxcb-keysyms1 \
libxcb-randr0 \
libxcb-render-util0 \
libxcb-shape0 \
libxcb-xinerama0 \
libxcb-xkb1 \
libxcomposite1 \
libxcursor1 \
libxi6 \
libxkbcommon0 \
libxkbcommon-x11-0 \
libxrandr2 \
libxrender1 \
libxtst6 \
libasound2 \
libdbus-1-3 \
libfontconfig1 \
libfreetype6 \
libgl1 \
libglib2.0-0 \
libnss3 \
libxcb-icccm4 \
libxcb-image0 \
libxcb-keysyms1 \
libxcb-randr0 \
libxcb-render-util0 \
libxcb-shape0 \
libxcb-xinerama0 \
libxcb-xkb1 \
libxcomposite1 \
libxcursor1 \
libxi6 \
libxkbcommon0 \
libxkbcommon-x11-0 \
libxrandr2 \
libxrender1 \
libxtst6 \
&& rm -rf /var/lib/apt/lists/*
# Add non-root user.
RUN useradd --create-home anki


@ -1,14 +1,17 @@
# Anki in Docker
# Building and running Anki in Docker
This is an example of how you can build and run Anki from inside Docker. This
approach keeps everything inside Docker images, and sends the GUI to an X11
display over TCP/IP. This approach keeps things tidy, so may be a good choice
for if you wish to build Anki irregularly and don't want to build it outside of
Docker.
This is an example Dockerfile contributed by an Anki user, which shows how Anki
can be both built and run from within a container. It works by streaming the GUI
over an X11 socket.
It takes longer to build after small changes however, so for development, if you
wish to use Docker, the approach [in the build
scripts](../../scripts/docker/README.md) may be more appropriate.
Building and running Anki within a container has the advantage of fully isolating
the build products and runtime dependencies from the rest of your system, but it is
a somewhat niche approach, with some downsides such as an inability to display natively
on Wayland, and a lack of integration with desktop icons/filetypes. But even if you
do not use this Dockerfile as-is, you may find it useful as a reference.
Anki's Linux CI is also implemented with Docker, and the Dockerfiles for that may
also be useful for reference - they can be found in `.buildkite/linux/docker`.
# Build the Docker image


@ -34,7 +34,7 @@ run 'rustup install nightly'.
Code completion partly depends on files that are generated as part of the
regular build process, so for things to work correctly, use './run' or
'scripts/build' prior to using code completion.
'tools/build' prior to using code completion.
## PyCharm/IntelliJ


@ -6,8 +6,9 @@ These instructions are written for Debian/Ubuntu; adjust for your distribution.
Some extra notes have been provided by a forum member:
https://forums.ankiweb.net/t/guide-how-to-build-and-run-anki-from-source-with-xubuntu-20-04/12865
You can see a full list of requirements by looking at the [Dockerfiles](../scripts/docker/README.md)
in the scripts folder.
You can see a full list of buildtime and runtime requirements by looking at the
[Dockerfiles](../.buildkite/linux/docker/Dockerfile.amd64) used to build the
official releases.
Glibc is required - if you are on a distro like Alpine that uses musl, you'll need
to contribute fixes to the upstream [Rust rules](https://github.com/bazelbuild/rules_rust/issues/390),
@ -101,7 +102,7 @@ to compile, but will mean Anki will run considerably slower.
To run Anki in optimized mode, use:
```
./scripts/runopt
./tools/runopt
```
## ARM64 support
@ -122,7 +123,7 @@ Note: the trailing slash at the end is required.
There are a few things to be aware of:
- You should use ./run and not scripts/run-qt5\*, even if your system libraries are Qt5.
- You should use ./run and not tools/run-qt5\*, even if your system libraries are Qt5.
- If your system libraries are Qt5, when creating an aqt wheel, the wheel will not work
on Qt6 environments.
- Some of the tests only work with PyQt6, and will show failures when run under PyQt5.


@ -7,14 +7,12 @@
Install the latest XCode from the App Store. Open it at least once
so it installs the command line tools.
**Homebrew & Homebrew Deps**:
Install Homebrew from <https://brew.sh/>
Then install Bazel:
**Bazelisk**:
```
$ brew install bazelisk
$ curl -L https://github.com/bazelbuild/bazelisk/releases/download/v1.11.0/bazelisk-darwin -o bazel \
&& chmod +x bazel \
&& sudo mv bazel /usr/local/bin
```
**Python**:
@ -61,7 +59,7 @@ to compile, but will mean Anki will run considerably slower.
To run Anki in optimized mode, use:
```
./scripts/runopt
./tools/runopt
```
## More


@ -26,7 +26,7 @@ Things to be aware of:
If you run Anki from git, you can run a sync server with:
```
./scripts/runopt --syncserver
./tools/runopt --syncserver
```
## From a packaged build


@ -74,7 +74,7 @@ to compile, but will mean Anki will run considerably slower.
To run Anki in optimized mode, use:
```
.\scripts\runopt
.\tools\runopt
```
## More
@ -82,7 +82,7 @@ To run Anki in optimized mode, use:
For info on running tests, building wheels and so on, please see
[Development](./development.md).
Note that where the instructions on that page say "bazel", please use ".\bazel"
instead. This runs bazel.bat inside the Anki source folder, instead of
calling Bazel directly. This takes care of setting up the path and output folder
correctly, which avoids issues with long path names.
When you run a script like .\run, MSYS and bazel will automatically be added to
the path, and Bazel will be configured to output build products into
\bazel\anki. If you want to directly invoke bazel before having run any of the
.bat files in this repo, please run tools\setup-env first.


@ -1,3 +1,3 @@
{
"exclude": ["**/node_modules", ".bazel"]
"exclude": ["**/node_modules", ".bazel", "dist"]
}


@ -14,10 +14,10 @@ python -m venv venv
../bazel.bat --output_base=/c/bazel/anki/base build //pylib/anki:wheel //qt/aqt:wheel
# install wheels, bound to constrained versions
venv/scripts/pip install -c requirements.txt ../bazel-bin/pylib/anki/*.whl ../bazel-bin/qt/aqt/*.whl pip-licenses
venv/tools/pip install -c requirements.txt ../bazel-bin/pylib/anki/*.whl ../bazel-bin/qt/aqt/*.whl pip-licenses
# dump licenses - ptable is a pip-licenses dep
venv/scripts/pip-licenses --format=json --ignore-packages anki aqt pip-license PTable > licenses.json
venv/tools/pip-licenses --format=json --ignore-packages anki aqt pip-license PTable > licenses.json
# clean up
rm -rf venv


@ -11,6 +11,7 @@ libc-stdhandle = "=0.1.0"
[dependencies.pyembed]
git = "https://github.com/ankitects/PyOxidizer.git"
# when changing this, pyoxidizer in /repos.bzl needs to be updated as well
rev = "eb26dd7cd1290de6503869f3d719eabcec45e139"
default-features = false

qt/bundle/build.py (new file, 395 lines)

@ -0,0 +1,395 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
from __future__ import annotations
import glob
import os
import platform
import re
import shutil
import subprocess
import sys
from pathlib import Path
is_win = sys.platform == "win32"
is_mac = sys.platform == "darwin"
workspace = Path(sys.argv[1])
bazel_external = Path(sys.argv[2])
def with_exe_extension(program: str) -> str:
if is_win:
return program + ".exe"
else:
return program
output_root = workspace / ".bazel" / "out" / "build"
dist_folder = output_root / ".." / "dist"
venv = output_root / f"venv-{platform.machine()}"
build_folder = output_root / f"build-{platform.machine()}"
cargo_target = output_root / f"target-{platform.machine()}"
artifacts = output_root / "artifacts"
pyo3_config = output_root / "pyo3-build-config-file.txt"
pyoxidizer_folder = bazel_external / "pyoxidizer"
arm64_protobuf_wheel = bazel_external / "protobuf_wheel_mac_arm64"
pyoxidizer_binary = cargo_target / "release" / with_exe_extension("pyoxidizer")
for path in dist_folder.glob("*.zst"):
path.unlink()
os.environ["PYOXIDIZER_ARTIFACT_DIR"] = str(artifacts)
os.environ["PYOXIDIZER_CONFIG"] = str(Path(os.getcwd()) / "pyoxidizer.bzl")
os.environ["CARGO_TARGET_DIR"] = str(cargo_target)
# OS-specific things
pyqt5_folder_name = "pyqt515"
pyqt6_folder_path = bazel_external / "pyqt6" / "PyQt6"
extra_linux_deps = bazel_external / "bundle_extras_linux_amd64"
extra_qt5_linux_plugins = extra_linux_deps / "qt5"
extra_qt6_linux_plugins = extra_linux_deps / "qt6"
is_lin = False
arm64_linux = arm64_mac = False
if is_win:
os.environ["TARGET"] = "x86_64-pc-windows-msvc"
elif sys.platform.startswith("darwin"):
if platform.machine() == "arm64":
arm64_mac = True
pyqt5_folder_name = None
os.environ["TARGET"] = "aarch64-apple-darwin"
os.environ["MACOSX_DEPLOYMENT_TARGET"] = "11.0"
else:
pyqt5_folder_name = "pyqt514"
os.environ["TARGET"] = "x86_64-apple-darwin"
os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.13"
else:
is_lin = True
if platform.machine() == "x86_64":
os.environ["TARGET"] = "x86_64-unknown-linux-gnu"
else:
os.environ["TARGET"] = "aarch64-unknown-linux-gnu"
pyqt5_folder_name = None
pyqt6_folder_path = None
arm64_linux = True
if is_win:
python_bin_folder = venv / "scripts"
os.environ["PATH"] += rf";{os.getenv('USERPROFILE')}\.cargo\bin"
cargo_features = "build-mode-prebuilt-artifacts"
else:
python_bin_folder = venv / "bin"
# PyOxidizer build depends on a system-installed version of Python,
# as the standalone build does not have its config set up properly,
# leading to "directory not found for option '-L/install/lib'".
# On macOS, after installing a system Python in /usr/local/bin,
# make sure /usr/local/bin/python3 is symlinked to /usr/local/bin/python.
os.environ["PATH"] = ":".join(
["/usr/local/bin", f"{os.getenv('HOME')}/.cargo/bin", os.getenv("PATH")]
)
cargo_features = "build-mode-prebuilt-artifacts"
if not is_mac or arm64_mac:
cargo_features += " global-allocator-jemalloc allocator-jemalloc"
python = python_bin_folder / with_exe_extension("python")
pip = python_bin_folder / with_exe_extension("pip")
artifacts_in_build = (
build_folder / os.getenv("TARGET") / "release" / "resources" / "extra_files"
)
def build_pyoxidizer():
pyoxidizer_folder_mtime = pyoxidizer_folder.stat().st_mtime
if (
pyoxidizer_binary.exists()
and pyoxidizer_binary.stat().st_mtime == pyoxidizer_folder_mtime
):
# avoid recompiling if pyoxidizer folder has not changed
return
subprocess.run(
[
"cargo",
"build",
"--release",
],
cwd=pyoxidizer_folder,
check=True,
)
os.utime(pyoxidizer_binary, (pyoxidizer_folder_mtime, pyoxidizer_folder_mtime))
def install_wheels_into_venv():
# Pip's handling of hashes is somewhat broken. It spots the hashes in the constraints
# file and forces all files to have a hash. We can manually hash our generated wheels
# and pass them in with hashes, but it still breaks, because the 'protobuf>=3.17'
# specifier in the pylib wheel is not allowed. Nevermind that a specific version is
# included in the constraints file we pass along! To get things working, we're
# forced to strip the hashes out before installing. This should be safe, as the files
# have already been validated as part of the build process.
constraints = output_root / "deps_without_hashes.txt"
with open(workspace / "python" / "requirements.txt") as f:
buf = f.read()
with open(constraints, "w") as f:
extracted = re.findall("^(\S+==\S+) ", buf, flags=re.M)
extracted = [
line for line in extracted if not arm64_mac or "protobuf" not in line
]
f.write("\n".join(extracted))
# pypi protobuf lacks C extension on darwin-arm64, so we have to use a version
# we built ourselves
if arm64_mac:
wheels = glob.glob(str(arm64_protobuf_wheel / "*.whl"))
subprocess.run(
[pip, "install", "--upgrade", "-c", constraints, *wheels], check=True
)
# install wheels and upgrade any deps
wheels = glob.glob(str(workspace / ".bazel" / "out" / "dist" / "*.whl"))
subprocess.run(
[pip, "install", "--upgrade", "-c", constraints, *wheels], check=True
)
# always reinstall our wheels
subprocess.run(
[pip, "install", "--force-reinstall", "--no-deps", *wheels], check=True
)
def build_artifacts():
if os.path.exists(artifacts):
shutil.rmtree(artifacts)
if os.path.exists(artifacts_in_build):
shutil.rmtree(artifacts_in_build)
subprocess.run(
[
pyoxidizer_binary,
"--system-rust",
"run-build-script",
"build.rs",
"--var",
"venv",
venv,
"--var",
"build",
build_folder,
],
check=True,
env=os.environ
| dict(
CARGO_MANIFEST_DIR=".",
OUT_DIR=str(artifacts),
PROFILE="release",
PYO3_PYTHON=str(python),
),
)
existing_config = None
if os.path.exists(pyo3_config):
with open(pyo3_config) as f:
existing_config = f.read()
with open(artifacts / "pyo3-build-config-file.txt") as f:
new_config = f.read()
# avoid bumping mtime, which triggers crate recompile
if new_config != existing_config:
with open(pyo3_config, "w") as f:
f.write(new_config)
def build_pkg():
subprocess.run(
[
"cargo",
"build",
"--release",
"--no-default-features",
"--features",
cargo_features,
],
check=True,
env=os.environ | dict(PYO3_CONFIG_FILE=str(pyo3_config)),
)
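# rsync treats "C:\foo" as a remote host:path spec, so hand it /C/foo style paths instead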
def adj_path_for_windows_rsync(path: Path) -> str:
if not is_win:
return str(path)
path = path.absolute()
rest = str(path)[2:].replace("\\", "/")
return f"/{path.drive[0]}{rest}"
def merge_into_dist(output_folder: Path, pyqt_src_path: Path | None):
if output_folder.exists():
shutil.rmtree(output_folder)
output_folder.mkdir(parents=True)
# PyQt
if pyqt_src_path and not is_mac:
subprocess.run(
[
"rsync",
"-a",
"--delete",
"--exclude-from",
"qt.exclude",
adj_path_for_windows_rsync(pyqt_src_path),
adj_path_for_windows_rsync(output_folder / "lib") + "/",
],
check=True,
)
if is_lin:
if "PyQt5" in str(pyqt_src_path):
src = extra_qt5_linux_plugins
dest = output_folder / "lib" / "PyQt5" / "Qt5" / "plugins"
else:
src = extra_qt6_linux_plugins
dest = output_folder / "lib" / "PyQt6" / "Qt6" / "plugins"
subprocess.run(
["rsync", "-a", str(src) + "/", str(dest) + "/"],
check=True,
)
# Executable and other resources
resources = [
adj_path_for_windows_rsync(
cargo_target / "release" / ("anki.exe" if is_win else "anki")
),
adj_path_for_windows_rsync(artifacts_in_build) + "/",
]
if is_lin:
resources.append("lin/")
subprocess.run(
[
"rsync",
"-a",
"--delete",
"--exclude",
"PyQt6",
"--exclude",
"PyQt5",
*resources,
adj_path_for_windows_rsync(output_folder) + "/",
],
check=True,
)
# Ensure all files are world-readable
if not is_win:
subprocess.run(["chmod", "-R", "a+r", output_folder])
def anki_version() -> str:
with open(workspace / "defs.bzl") as fobj:
data = fobj.read()
return re.search('^anki_version = "(.*)"$', data, re.MULTILINE).group(1)
def annotated_linux_folder_name(variant: str) -> str:
components = ["anki", anki_version(), "linux", variant]
return "-".join(components)
def annotated_mac_dmg_name(variant: str) -> str:
if platform.machine() == "arm64":
arch = "apple"
else:
arch = "intel"
components = ["anki", anki_version(), "mac", arch, variant]
return "-".join(components)
def build_bundle(src_path: Path, variant: str) -> None:
if is_lin:
print("--- Build tarball")
build_tarball(src_path, variant)
elif is_mac:
print("--- Build app bundle")
build_app_bundle(src_path, variant)
def build_app_bundle(src_path: Path, variant: str) -> None:
if arm64_mac:
variant = "qt6_arm64"
else:
variant = f"{variant}_amd64"
subprocess.run(
["cargo", "run", variant, src_path, anki_version(), bazel_external],
check=True,
cwd=workspace / "qt" / "bundle" / "mac",
)
variant_path = src_path.parent / "app" / variant
if os.getenv("NOTARIZE_USER"):
subprocess.run(
["python", "mac/notarize.py", "upload", variant_path],
check=True,
)
# note down the dmg name for later
open(variant_path / "dmg_name", "w").write(
annotated_mac_dmg_name(variant[0:3]) + ".dmg"
)
def build_tarball(src_path: Path, variant: str) -> None:
if not is_lin:
return
dest_path = src_path.with_name(annotated_linux_folder_name(variant))
if dest_path.exists():
shutil.rmtree(dest_path)
os.rename(src_path, dest_path)
print("compress", dest_path.name, "...")
subprocess.run(
[
"tar",
"--zstd",
"-cf",
dist_folder / (dest_path.name + ".tar.zst"),
dest_path.name,
],
check=True,
env=dict(ZSTD_CLEVEL="9"),
cwd=dest_path.parent,
)
def build_windows_installers() -> None:
subprocess.run(
[
"cargo",
"run",
output_root,
bazel_external,
Path(__file__).parent,
anki_version(),
],
check=True,
cwd=workspace / "qt" / "bundle" / "win",
)
print("--- Build PyOxidizer")
build_pyoxidizer()
print("--- Install wheels into venv")
install_wheels_into_venv()
print("--- Build PyOxidizer artifacts")
build_artifacts()
print("--- Build Anki binary")
build_pkg()
print("--- Copy binary+resources into folder (Qt6)")
merge_into_dist(output_root / "std", pyqt6_folder_path)
build_bundle(output_root / "std", "qt6")
if pyqt5_folder_name:
print("--- Copy binary+resources into folder (Qt5)")
merge_into_dist(output_root / "alt", bazel_external / pyqt5_folder_name / "PyQt5")
build_bundle(output_root / "alt", "qt5")
if is_win:
build_windows_installers()
if is_mac:
print("outputs are in .bazel/out/build/{std,alt}")
print("dmg can be created with mac/finalize.py dmg")
else:
print("outputs are in .bazel/out/dist/")


@ -104,6 +104,6 @@ fn main() {
// embed manifest and icon
if target_family == "windows" {
embed_resource::compile("anki-manifest.rc");
embed_resource::compile("win/anki-manifest.rc");
}
}


(image changed; 34 KiB before and after)

qt/bundle/mac/Cargo.lock (generated, new file, 204 lines)

@ -0,0 +1,204 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "anyhow"
version = "1.0.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94a45b455c14666b85fc40a019e8ab9eb75e3a124e05494f5397122bc9eb06e0"
[[package]]
name = "apple-bundles"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48681b45ff6789616b243c0758d6d97639951f937ccc0ea635363505d72cdec3"
dependencies = [
"anyhow",
"plist",
"tugger-file-manifest",
"walkdir",
]
[[package]]
name = "autocfg"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "base64"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"
[[package]]
name = "glob"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
[[package]]
name = "hashbrown"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
[[package]]
name = "indexmap"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5"
dependencies = [
"autocfg",
"hashbrown",
]
[[package]]
name = "itoa"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.114"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0005d08a8f7b65fb8073cb697aa0b12b631ed251ce73d862ce50eeb52ce3b50"
[[package]]
name = "line-wrap"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f30344350a2a51da54c1d53be93fade8a237e545dbcc4bdbe635413f2117cab9"
dependencies = [
"safemem",
]
[[package]]
name = "makeapp"
version = "0.1.0"
dependencies = [
"anyhow",
"apple-bundles",
"glob",
"lazy_static",
"plist",
"tugger-file-manifest",
"walkdir",
]
[[package]]
name = "num_threads"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97ba99ba6393e2c3734791401b66902d981cb03bf190af674ca69949b6d5fb15"
dependencies = [
"libc",
]
[[package]]
name = "plist"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd39bc6cdc9355ad1dc5eeedefee696bb35c34caf21768741e81826c0bbd7225"
dependencies = [
"base64",
"indexmap",
"line-wrap",
"serde",
"time",
"xml-rs",
]
[[package]]
name = "safemem"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072"
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
"winapi-util",
]
[[package]]
name = "serde"
version = "1.0.130"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f12d06de37cf59146fbdecab66aa99f9fe4f78722e3607577a5375d66bd0c913"
[[package]]
name = "time"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "004cbc98f30fa233c61a38bc77e96a9106e65c88f2d3bef182ae952027e5753d"
dependencies = [
"itoa",
"libc",
"num_threads",
]
[[package]]
name = "tugger-file-manifest"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29e91ac69050080a0a9fd50af05da5baa8562347ca7b8909f8ed3adbc6ef026f"
[[package]]
name = "walkdir"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
dependencies = [
"same-file",
"winapi",
"winapi-util",
]
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [
"winapi",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "xml-rs"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2d7d3948613f75c98fd9328cfdcc45acc4d360655289d0a7d4ec931392200a3"

qt/bundle/mac/Cargo.toml (new file, 15 lines)

@ -0,0 +1,15 @@
[package]
edition = "2021"
name = "makeapp"
version = "0.1.0"
authors = ["Ankitects Pty Ltd and contributors"]
license = "AGPL-3.0-or-later"
[dependencies]
anyhow = "1.0.53"
glob = "0.3.0"
plist = "1.3.1"
walkdir = "2.3.2"
apple-bundles= "0.6.0"
tugger-file-manifest= "0.6.0"
lazy_static = "1.4.0"

Binary file not shown.

(image added; 93 KiB)

qt/bundle/mac/dmg/build.sh (new executable file, 42 lines)

@ -0,0 +1,42 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
set -e
# base folder with Anki.app in it
dist=$1
dmg_path=$2
script_folder=$(dirname $0)
if [ -d "/Volumes/Anki" ]
then
echo "You already have one Anki mounted, unmount it first!"
exit 1
fi
echo "bundling..."
ln -s /Applications $dist/Applications
mkdir $dist/.background
cp ${script_folder}/anki-logo-bg.png $dist/.background
cp ${script_folder}/dmg_ds_store $dist/.DS_Store
# create a writable dmg first, and modify its layout with AppleScript
hdiutil create -attach -ov -format UDRW -fs HFS+ -volname Anki -srcfolder $dist -o /tmp/Anki-rw.dmg
# announce before making the window appear
say "applescript"
open /tmp/Anki-rw.dmg
sleep 2
open ${script_folder}/set-dmg-settings.app
sleep 2
hdiutil detach "/Volumes/Anki"
sleep 1
if [ -d "/Volumes/Anki" ]
then
echo "drive did not detach"
exit 1
fi
# convert it to a read-only image
rm -rf $dmg_path
hdiutil convert /tmp/Anki-rw.dmg -ov -format ULFO -o $dmg_path
rm -rf /tmp/Anki-rw.dmg $dist

Binary file not shown.


@ -0,0 +1,74 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleAllowMixedLocalizations</key>
<true/>
<key>CFBundleDevelopmentRegion</key>
<string>English</string>
<key>CFBundleExecutable</key>
<string>applet</string>
<key>CFBundleIconFile</key>
<string>applet</string>
<key>CFBundleIdentifier</key>
<string>com.apple.ScriptEditor.id.set-dmg-settings</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>set-dmg-settings</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>aplt</string>
<key>LSMinimumSystemVersionByArchitecture</key>
<dict>
<key>x86_64</key>
<string>10.6</string>
</dict>
<key>LSRequiresCarbon</key>
<true/>
<key>NSAppleEventsUsageDescription</key>
<string>This script needs to control other applications to run.</string>
<key>NSAppleMusicUsageDescription</key>
<string>This script needs access to your music to run.</string>
<key>NSCalendarsUsageDescription</key>
<string>This script needs access to your calendars to run.</string>
<key>NSCameraUsageDescription</key>
<string>This script needs access to your camera to run.</string>
<key>NSContactsUsageDescription</key>
<string>This script needs access to your contacts to run.</string>
<key>NSHomeKitUsageDescription</key>
<string>This script needs access to your HomeKit Home to run.</string>
<key>NSMicrophoneUsageDescription</key>
<string>This script needs access to your microphone to run.</string>
<key>NSPhotoLibraryUsageDescription</key>
<string>This script needs access to your photos to run.</string>
<key>NSRemindersUsageDescription</key>
<string>This script needs access to your reminders to run.</string>
<key>NSSiriUsageDescription</key>
<string>This script needs access to Siri to run.</string>
<key>NSSystemAdministrationUsageDescription</key>
<string>This script needs access to administer this system to run.</string>
<key>WindowState</key>
<dict>
<key>bundleDividerCollapsed</key>
<true/>
<key>bundlePositionOfDivider</key>
<real>0.0</real>
<key>dividerCollapsed</key>
<false/>
<key>eventLogLevel</key>
<integer>2</integer>
<key>name</key>
<string>ScriptWindowState</string>
<key>positionOfDivider</key>
<real>388</real>
<key>savedFrame</key>
<string>1308 314 700 672 0 0 2880 1597 </string>
<key>selectedTab</key>
<string>result</string>
</dict>
</dict>
</plist>

Binary file not shown.


@ -0,0 +1 @@
APPLaplt

Binary file not shown.

(image added; 362 B)


@ -0,0 +1,5 @@
{\rtf1\ansi\ansicpg1252\cocoartf1671
{\fonttbl}
{\colortbl;\red255\green255\blue255;}
{\*\expandedcolortbl;;}
}


@ -0,0 +1,177 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>files</key>
<dict>
<key>Resources/Scripts/main.scpt</key>
<data>
BbcHsL7M8GleNWeDVHOZVEfpSUQ=
</data>
<key>Resources/applet.icns</key>
<data>
sINd6lbiqHD5dL8c6u79cFvVXhw=
</data>
<key>Resources/applet.rsrc</key>
<data>
7JOq2AjTwoRdSRoaun87Me8EbB4=
</data>
<key>Resources/description.rtfd/TXT.rtf</key>
<data>
HZLGvORC/avx2snxaACit3D0IJY=
</data>
</dict>
<key>files2</key>
<dict>
<key>Resources/Scripts/main.scpt</key>
<dict>
<key>hash</key>
<data>
BbcHsL7M8GleNWeDVHOZVEfpSUQ=
</data>
<key>hash2</key>
<data>
T6pvOxUGXyc+qwn+hdv1xPzvnYM+qo9uxLLWUkIFq3Q=
</data>
</dict>
<key>Resources/applet.icns</key>
<dict>
<key>hash</key>
<data>
sINd6lbiqHD5dL8c6u79cFvVXhw=
</data>
<key>hash2</key>
<data>
J7weZ6vlnv9r32tS5HFcyuPXl2StdDnfepLxAixlryk=
</data>
</dict>
<key>Resources/applet.rsrc</key>
<dict>
<key>hash</key>
<data>
7JOq2AjTwoRdSRoaun87Me8EbB4=
</data>
<key>hash2</key>
<data>
WvL2TvNeKuY64Sp86Cyvcmiood5xzbJmcAH3R0+gIc8=
</data>
</dict>
<key>Resources/description.rtfd/TXT.rtf</key>
<dict>
<key>hash</key>
<data>
HZLGvORC/avx2snxaACit3D0IJY=
</data>
<key>hash2</key>
<data>
XuDTd2OPOPGq65NBuXy6WuqU+bODdg+oDmBFhsZTaVU=
</data>
</dict>
</dict>
<key>rules</key>
<dict>
<key>^Resources/</key>
<true/>
<key>^Resources/.*\.lproj/</key>
<dict>
<key>optional</key>
<true/>
<key>weight</key>
<real>1000</real>
</dict>
<key>^Resources/.*\.lproj/locversion.plist$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>1100</real>
</dict>
<key>^Resources/Base\.lproj/</key>
<dict>
<key>weight</key>
<real>1010</real>
</dict>
<key>^version.plist$</key>
<true/>
</dict>
<key>rules2</key>
<dict>
<key>.*\.dSYM($|/)</key>
<dict>
<key>weight</key>
<real>11</real>
</dict>
<key>^(.*/)?\.DS_Store$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>2000</real>
</dict>
<key>^(Frameworks|SharedFrameworks|PlugIns|Plug-ins|XPCServices|Helpers|MacOS|Library/(Automator|Spotlight|LoginItems))/</key>
<dict>
<key>nested</key>
<true/>
<key>weight</key>
<real>10</real>
</dict>
<key>^.*</key>
<true/>
<key>^Info\.plist$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>20</real>
</dict>
<key>^PkgInfo$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>20</real>
</dict>
<key>^Resources/</key>
<dict>
<key>weight</key>
<real>20</real>
</dict>
<key>^Resources/.*\.lproj/</key>
<dict>
<key>optional</key>
<true/>
<key>weight</key>
<real>1000</real>
</dict>
<key>^Resources/.*\.lproj/locversion.plist$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>1100</real>
</dict>
<key>^Resources/Base\.lproj/</key>
<dict>
<key>weight</key>
<real>1010</real>
</dict>
<key>^[^/]+$</key>
<dict>
<key>nested</key>
<true/>
<key>weight</key>
<real>10</real>
</dict>
<key>^embedded\.provisionprofile$</key>
<dict>
<key>weight</key>
<real>20</real>
</dict>
<key>^version\.plist$</key>
<dict>
<key>weight</key>
<real>20</real>
</dict>
</dict>
</dict>
</plist>

Binary file not shown.


@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.disable-executable-page-protection</key>
<true/>
<key>com.apple.security.device.audio-input</key>
<true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>

qt/bundle/mac/finalize.py (new file, 60 lines)

@ -0,0 +1,60 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# These steps are outside bundle/build.py, so that multiple builds can be done
# in sequence without blocking on Apple's notarization, and so that the final
# dmg build can be done in bulk at the end.
import os
import subprocess
import sys
from pathlib import Path
output_root = Path(__file__).parent / "../../../.bazel/out"
dist_folder = output_root / "dist"
apps = output_root / "build" / "app"
variants = ["qt6_arm64", "qt6_amd64", "qt5_amd64"]
def staple_apps() -> None:
for variant in variants:
variant_base = apps / variant
if variant_base.exists():
if os.getenv("NOTARIZE_USER"):
subprocess.run(
[
"python",
Path(__file__).with_name("notarize.py"),
"staple",
variant_base,
],
check=True,
)
else:
print("skip missing", variant_base)
def build_dmgs() -> None:
for variant in variants:
variant_base = apps / variant
if variant_base.exists():
dmg_name_path = variant_base / "dmg_name"
dmg_name = open(dmg_name_path).read()
dmg_name_path.unlink()
subprocess.run(
[
"bash",
Path(__file__).with_name("dmg") / "build.sh",
variant_base,
dist_folder / dmg_name,
],
check=True,
)
else:
print("skip missing", variant_base)
if sys.argv[1] == "staple":
staple_apps()
elif sys.argv[1] == "dmg":
build_dmgs()

Binary file not shown.


@ -0,0 +1,59 @@
{
"images" : [
{
"idiom" : "mac",
"scale" : "1x",
"size" : "16x16"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "16x16"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "32x32"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "32x32"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "128x128"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "128x128"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "256x256"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "256x256"
},
{
"filename" : "round-1024-512.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "512x512"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "512x512"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.

(image added; 358 KiB)


@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

qt/bundle/mac/icon/build.sh (new executable file, 5 lines)

@ -0,0 +1,5 @@
#!/bin/bash
set -e
xcrun actool --app-icon AppIcon $(pwd)/Assets.xcassets --compile . --platform macosx --minimum-deployment-target 13.0 --target-device mac --output-partial-info-plist /dev/null

qt/bundle/mac/notarize.py (new file, 148 lines)

@ -0,0 +1,148 @@
#!/usr/bin/env python
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import os
import re
import subprocess
import sys
import time
from pathlib import Path
USERNAME = os.getenv("NOTARIZE_USER")
PASSWORD = os.getenv("NOTARIZE_PASSWORD")
BUNDLE_ID = "net.ankiweb.dtop"
def upload(base_dir: Path, uuid_path: Path) -> None:
print("--- Prepare notarization zip")
app_dir = base_dir / "Anki.app"
zip_path = app_dir.with_suffix(".zip")
subprocess.run(["ditto", "-c", "-k", "--keepParent", app_dir, zip_path])
print("--- Upload for notarization")
try:
output = subprocess.check_output(
[
"xcrun",
"altool",
"--notarize-app",
"--primary-bundle-id",
BUNDLE_ID,
"--username",
USERNAME,
"--password",
PASSWORD,
"--file",
zip_path,
],
stderr=subprocess.STDOUT,
encoding="utf8",
)
except subprocess.CalledProcessError as e:
print("error uploading:", e.output)
sys.exit(1)
uuid = None
for line in output.splitlines():
m = re.search(r"RequestUUID = (.*)", line)
if m:
uuid = m.group(1)
if not uuid:
print("no uuid found - upload output:")
print(output)
sys.exit(1)
open(uuid_path, "w").write(uuid)
zip_path.unlink()
def _extract_status(output):
for line in output.splitlines():
m = re.search(r"Status: (.*)", line)
if m:
return m.group(1)
return None
def wait_for_success(uuid, wait=True):
while True:
print("checking status...", end="", flush=True)
try:
output = subprocess.check_output(
[
"xcrun",
"altool",
"--notarization-info",
uuid,
"--username",
USERNAME,
"--password",
PASSWORD,
],
stderr=subprocess.STDOUT,
encoding="utf8",
)
except subprocess.CalledProcessError as e:
print("error checking status:")
print(e.output)
sys.exit(1)
status = _extract_status(output)
if status is None:
print("could not extract status:")
print(output)
sys.exit(1)
if status == "invalid":
print("notarization failed:")
print(output)
sys.exit(1)
if status == "success":
print("success!")
print(output)
return
print(status)
if not wait:
return
time.sleep(30)
def staple(app_path):
try:
subprocess.check_call(
[
"xcrun",
"stapler",
"staple",
app_path,
]
)
except subprocess.CalledProcessError as e:
print("error stapling:")
print(e.output)
sys.exit(1)
cmd = sys.argv[1]
base_dir = Path(sys.argv[2])
uuid_path = base_dir / "uuid"
if cmd == "upload":
upload(base_dir, uuid_path)
elif cmd == "status":
uuid = open(uuid_path).read()
wait_for_success(uuid, False)
elif cmd == "staple":
uuid = open(uuid_path).read()
wait_for_success(uuid)
staple(base_dir / "Anki.app")
uuid_path.unlink()


@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDisplayName</key>
<string>Anki</string>
<key>CFBundleShortVersionString</key>
<string>2.1.46</string>
<key>LSMinimumSystemVersion</key>
<string>10.13.4</string>
<key>CFBundleDocumentTypes</key>
<array>
<dict>
<key>CFBundleTypeExtensions</key>
<array>
<string>colpkg</string>
<string>apkg</string>
<string>ankiaddon</string>
</array>
<key>CFBundleTypeIconName</key>
<string>AppIcon</string>
<key>CFBundleTypeName</key>
<string>Anki File</string>
<key>CFBundleTypeRole</key>
<string>Editor</string>
</dict>
</array>
<key>CFBundleExecutable</key>
<string>MacOS/anki</string>
<key>CFBundleIconName</key>
<string>AppIcon</string>
<key>CFBundleIdentifier</key>
<string>net.ankiweb.dtop</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>Anki</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>NSHighResolutionCapable</key>
<true/>
<key>NSMicrophoneUsageDescription</key>
<string>The microphone will only be used when you tap the record button.</string>
<key>NSRequiresAquaSystemAppearance</key>
<true/>
<key>NSSupportsAutomaticGraphicsSwitching</key>
<true/>
</dict>
</plist>

qt/bundle/mac/src/main.rs (new file, 223 lines)

@ -0,0 +1,223 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
//! Munge the output of PyOxidizer into a macOS app bundle, and combine it
//! with our other runtime dependencies.
use std::{
path::{Path, PathBuf},
str::FromStr,
};
use anyhow::{bail, Context, Result};
use apple_bundles::MacOsApplicationBundleBuilder;
use plist::Value;
use tugger_file_manifest::FileEntry;
use walkdir::WalkDir;
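// Hardened runtime (-o runtime) is required for notarization; -s selects the Developer ID signing identity.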
const CODESIGN_ARGS: &[&str] = &["-vvvv", "-o", "runtime", "-s", "Developer ID Application:"];
#[derive(Clone, Copy, Debug)]
enum Variant {
StandardX86,
StandardArm,
AlternateX86,
}
impl FromStr for Variant {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(match s {
"qt6_arm64" => Variant::StandardArm,
"qt6_amd64" => Variant::StandardX86,
"qt5_amd64" => Variant::AlternateX86,
other => bail!("unexpected variant: {other}"),
})
}
}
impl Variant {
fn output_base(&self) -> &str {
match self {
Variant::StandardX86 => "qt6_amd64",
Variant::StandardArm => "qt6_arm64",
Variant::AlternateX86 => "qt5_amd64",
}
}
fn macos_min(&self) -> &str {
match self {
Variant::StandardX86 => "10.14.4",
Variant::StandardArm => "11",
Variant::AlternateX86 => "10.13.4",
}
}
fn qt_repo(&self) -> &str {
match self {
Variant::StandardX86 => "pyqt6.2_mac_bundle_amd64",
Variant::StandardArm => "pyqt6.2_mac_bundle_arm64",
Variant::AlternateX86 => "pyqt5.14_mac_bundle_amd64",
}
}
fn audio_repo(&self) -> &str {
match self {
Variant::StandardX86 | Variant::AlternateX86 => "audio_mac_amd64",
Variant::StandardArm => "audio_mac_arm64",
}
}
}
fn main() -> anyhow::Result<()> {
let args: Vec<_> = std::env::args().collect();
let variant: Variant = args.get(1).context("variant")?.parse()?;
let bundle_folder = PathBuf::from(args.get(2).context("bundle folder")?);
let anki_version = args.get(3).context("anki version")?;
let bazel_external = PathBuf::from(args.get(4).context("bazel external folder")?);
let plist = get_plist(anki_version);
make_app(variant, &bundle_folder, plist, &bazel_external)
}
fn make_app(
variant: Variant,
input_folder: &Path,
mut plist: plist::Dictionary,
bazel_external: &Path,
) -> Result<()> {
let output_folder = input_folder
.with_file_name("app")
.join(variant.output_base())
.join("Anki.app");
if output_folder.exists() {
std::fs::remove_dir_all(&output_folder)?;
}
std::fs::create_dir_all(&output_folder)?;
let mut builder = MacOsApplicationBundleBuilder::new("Anki")?;
plist.insert(
"LSMinimumSystemVersion".into(),
Value::from(variant.macos_min()),
);
builder.set_info_plist_from_dictionary(plist)?;
builder.add_file_resources("Assets.car", &include_bytes!("../icon/Assets.car")[..])?;
for entry in WalkDir::new(&input_folder)
.into_iter()
.map(Result::unwrap)
.filter(|e| !e.file_type().is_dir())
{
let path = entry.path();
let entry = FileEntry::try_from(path)?;
let relative_path = path.strip_prefix(&input_folder)?;
let path_str = relative_path.to_str().unwrap();
if path_str.contains("libankihelper") {
builder.add_file_macos("libankihelper.dylib", entry)?;
} else if path_str.contains("aqt/data") {
builder.add_file_resources(relative_path.strip_prefix("lib").unwrap(), entry)?;
} else {
if path_str.contains("__pycache__") {
continue;
}
builder.add_file_macos(relative_path, entry)?;
}
}
let dry_run = false;
if dry_run {
for file in builder.files().iter_files() {
println!("{}", file.path_string());
}
} else {
builder.files().materialize_files(&output_folder)?;
fix_rpath(output_folder.join("Contents/MacOS/anki"))?;
codesign_python_libs(&output_folder)?;
copy_in_audio(&output_folder, variant, bazel_external)?;
copy_in_qt(&output_folder, variant, bazel_external)?;
codesign_app(&output_folder)?;
}
Ok(())
}
/// Copy everything at the provided path into the Contents/ folder of our app.
/// Excludes standard Bazel repo files.
fn extend_app_contents(source: &Path, bundle_dir: &Path) -> Result<()> {
let status = std::process::Command::new("rsync")
.arg("-a")
.args(["--exclude", "BUILD.bazel", "--exclude", "WORKSPACE"])
.arg(format!("{}/", source.to_string_lossy()))
.arg(bundle_dir.join("Contents/"))
.status()?;
if !status.success() {
bail!("error syncing {source:?}");
}
Ok(())
}
fn copy_in_audio(bundle_dir: &Path, variant: Variant, bazel_external: &Path) -> Result<()> {
println!("Copying in audio...");
extend_app_contents(&bazel_external.join(variant.audio_repo()), bundle_dir)
}
fn copy_in_qt(bundle_dir: &Path, variant: Variant, bazel_external: &Path) -> Result<()> {
println!("Copying in Qt...");
extend_app_contents(&bazel_external.join(variant.qt_repo()), bundle_dir)
}
fn codesign_file(path: &Path, extra_args: &[&str]) -> Result<()> {
if option_env!("ANKI_CODESIGN").is_some() {
let status = std::process::Command::new("codesign")
.args(CODESIGN_ARGS)
.args(extra_args)
.arg(path.to_str().unwrap())
.status()?;
if !status.success() {
bail!("codesign failed");
}
}
Ok(())
}
fn codesign_python_libs(bundle_dir: &PathBuf) -> Result<()> {
for entry in glob::glob(
bundle_dir
.join("Contents/MacOS/lib/**/*.so")
.to_str()
.unwrap(),
)? {
let entry = entry?;
codesign_file(&entry, &[])?;
}
codesign_file(&bundle_dir.join("Contents/MacOS/libankihelper.dylib"), &[])
}
fn codesign_app(bundle_dir: &PathBuf) -> Result<()> {
codesign_file(bundle_dir, &["--entitlements", "entitlements.python.xml"])
}
fn fix_rpath(exe_path: PathBuf) -> Result<()> {
let status = std::process::Command::new("install_name_tool")
.arg("-add_rpath")
.arg("@executable_path/../Frameworks")
.arg(exe_path.to_str().unwrap())
.status()?;
assert!(status.success());
Ok(())
}
fn get_plist(anki_version: &str) -> plist::Dictionary {
let reader = std::io::Cursor::new(include_bytes!("Info.plist"));
let mut plist = plist::Value::from_reader(reader)
.unwrap()
.into_dictionary()
.unwrap();
plist.insert(
"CFBundleShortVersionString".into(),
Value::from(anki_version),
);
plist
}

@@ -1,4 +1,4 @@
set_build_path("../../bazel-pkg/build")
set_build_path(VARS.get("build"))
excluded_source_prefixes = [
"ctypes.test",
@@ -131,6 +131,9 @@ def make_exe():
# detected libs do not need this, but we add extra afterwards
python_config.module_search_paths = ["$ORIGIN/lib"]
python_config.optimization_level = 2
if BUILD_TARGET_TRIPLE == "x86_64-apple-darwin":
# jemalloc currently fails to build when run under Rosetta
python_config.allocator_backend = "default"
python_config.run_command = "import aqt; aqt.run()"

qt/bundle/win/Cargo.lock (generated, new file, 1497 lines)

File diff suppressed because it is too large.

qt/bundle/win/Cargo.toml (new file, 14 lines)

@@ -0,0 +1,14 @@
[package]
edition = "2021"
name = "makeinstall"
version = "0.1.0"
authors = ["Ankitects Pty Ltd and contributors"]
license = "AGPL-3.0-or-later"
[dependencies]
anyhow = "1.0.53"
glob = "0.3.0"
slog = "2.7.0"
slog-term = "2.8.0"
tugger-windows-codesign = "0.6.0"
walkdir = "2.3.2"

(image diff omitted: binary file, 101 KiB before and after)

@@ -0,0 +1,210 @@
;; This installer was written many years ago, and it is probably worth investigating modern
;; installer alternatives.
!include "fileassoc.nsh"
!include WinVer.nsh
!include x64.nsh
; must be installed into NSIS install location
; can be found on https://github.com/ankitects/anki-bundle-extras/releases/tag/anki-2022-02-09
!include nsProcess.nsh
;--------------------------------
!pragma warning disable 6020 ; don't complain about missing installer in second invocation
; The name of the installer
Name "Anki"
Unicode true
; The file to write
OutFile "anki-setup.exe"
; The default installation directory
InstallDir "$PROGRAMFILES64\Anki"
; Remember the install location
InstallDirRegKey HKLM "Software\Anki" "Install_Dir64"
AllowSkipFiles off
!ifdef NO_COMPRESS
SetCompress off
!else
SetCompressor /solid lzma
!endif
Function .onInit
${IfNot} ${AtLeastWin10}
MessageBox MB_OK "Windows 10 or later required."
Quit
${EndIf}
${IfNot} ${RunningX64}
MessageBox MB_OK "64bit Windows is required."
Quit
${EndIf}
${nsProcess::FindProcess} "anki.exe" $R0
StrCmp $R0 0 0 notRunning
MessageBox MB_OK|MB_ICONEXCLAMATION "Anki.exe is already running. Please close it, then run the installer again." /SD IDOK
Abort
notRunning:
FunctionEnd
!ifdef WRITE_UNINSTALLER
!uninstfinalize 'copy "%1" "std\uninstall.exe"'
!endif
;--------------------------------
; Pages
Page directory
Page instfiles
;; manifest removal script shared by installer and uninstaller
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
!define UninstLog "anki.install-manifest"
Var UninstLog
!macro removeManifestFiles un
Function ${un}removeManifestFiles
IfFileExists "$INSTDIR\${UninstLog}" proceed
DetailPrint "No previous install manifest found, skipping cleanup."
return
;; this code was based on an example found on the net, which I can no longer find
proceed:
Push $R0
Push $R1
Push $R2
SetFileAttributes "$INSTDIR\${UninstLog}" NORMAL
FileOpen $UninstLog "$INSTDIR\${UninstLog}" r
StrCpy $R1 -1
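;; read every line of the manifest onto the stack (trailing CRLF stripped); $R1 ends up holding the line count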
GetLineCount:
ClearErrors
FileRead $UninstLog $R0
IntOp $R1 $R1 + 1
StrCpy $R0 $R0 -2
Push $R0
IfErrors 0 GetLineCount
Pop $R0
LoopRead:
StrCmp $R1 0 LoopDone
Pop $R0
;; manifest is relative to instdir
StrCpy $R0 "$INSTDIR\$R0"
IfFileExists "$R0\*.*" 0 +3
RMDir $R0 #is dir
Goto processed
IfFileExists $R0 0 +3
Delete $R0 #is file
Goto processed
processed:
IntOp $R1 $R1 - 1
Goto LoopRead
LoopDone:
FileClose $UninstLog
Delete "$INSTDIR\${UninstLog}"
RMDir "$INSTDIR"
Pop $R2
Pop $R1
Pop $R0
FunctionEnd
!macroend
!insertmacro removeManifestFiles ""
!insertmacro removeManifestFiles "un."
;--------------------------------
; The stuff to install
Section ""
SetShellVarContext all
Call removeManifestFiles
; Set output path to the installation directory.
SetOutPath $INSTDIR
CreateShortCut "$DESKTOP\Anki.lnk" "$INSTDIR\anki.exe" ""
CreateShortCut "$SMPROGRAMS\Anki.lnk" "$INSTDIR\anki.exe" ""
; Add files to installer
!ifndef WRITE_UNINSTALLER
File /r @@SRC@@\*.*
!endif
!insertmacro APP_ASSOCIATE "apkg" "anki.apkg" \
"Anki deck package" "$INSTDIR\anki.exe,0" \
"Open with Anki" "$INSTDIR\anki.exe $\"%L$\""
!insertmacro APP_ASSOCIATE "colpkg" "anki.colpkg" \
"Anki collection package" "$INSTDIR\anki.exe,0" \
"Open with Anki" "$INSTDIR\anki.exe $\"%L$\""
!insertmacro APP_ASSOCIATE "ankiaddon" "anki.ankiaddon" \
"Anki add-on" "$INSTDIR\anki.exe,0" \
"Open with Anki" "$INSTDIR\anki.exe $\"%L$\""
!insertmacro UPDATEFILEASSOC
; Write the installation path into the registry
WriteRegStr HKLM Software\Anki "Install_Dir64" "$INSTDIR"
; Write the uninstall keys for Windows
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayName" "Anki"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayVersion" "@@VERSION@@"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "UninstallString" '"$INSTDIR\uninstall.exe"'
WriteRegDWORD HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "NoModify" 1
WriteRegDWORD HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "NoRepair" 1
!ifdef WRITE_UNINSTALLER
WriteUninstaller "uninstall.exe"
!endif
SectionEnd ; end the section
;--------------------------------
; Uninstaller
function un.onInit
MessageBox MB_OKCANCEL "This will remove Anki's program files, but will not delete your card data. If you wish to delete your card data as well, you can do so via File>Switch Profile inside Anki first. Are you sure you wish to uninstall Anki?" IDOK next
Quit
next:
functionEnd
Section "Uninstall"
SetShellVarContext all
Call un.removeManifestFiles
; Remove other shortcuts
Delete "$DESKTOP\Anki.lnk"
Delete "$SMPROGRAMS\Anki.lnk"
; associations
!insertmacro APP_UNASSOCIATE "apkg" "anki.apkg"
!insertmacro APP_UNASSOCIATE "colpkg" "anki.colpkg"
!insertmacro APP_UNASSOCIATE "ankiaddon" "anki.ankiaddon"
!insertmacro UPDATEFILEASSOC
; try to remove top level folder if empty
RMDir "$INSTDIR"
; Remove registry keys
DeleteRegKey HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki"
DeleteRegKey HKLM Software\Anki
SectionEnd

qt/bundle/win/fileassoc.nsh (new file, 120 lines)

@@ -0,0 +1,120 @@
; fileassoc.nsh
; https://nsis.sourceforge.io/File_Association
; File association helper macros
; Written by Saivert
;
; Features automatic backup system and UPDATEFILEASSOC macro for
; shell change notification.
;
; |> How to use <|
; To associate a file with an application so you can double-click it in explorer, use
; the APP_ASSOCIATE macro like this:
;
; Example:
; !insertmacro APP_ASSOCIATE "txt" "myapp.textfile" "$INSTDIR\myapp.exe,0" \
; "Open with myapp" "$INSTDIR\myapp.exe $\"%1$\""
;
; Never insert the APP_ASSOCIATE macro multiple times; it is only meant
; to associate an application with a single file, using the
; "open" verb as default. To add more verbs (actions) to a file
; use the APP_ASSOCIATE_ADDVERB macro.
;
; Example:
; !insertmacro APP_ASSOCIATE_ADDVERB "myapp.textfile" "edit" "Edit with myapp" \
; "$INSTDIR\myapp.exe /edit $\"%1$\""
;
; To have access to more options when registering the file association use the
; APP_ASSOCIATE_EX macro. Here you can specify the verb and what verb is to be the
; standard action (default verb).
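;
; Example (illustrative only; the arguments follow the APP_ASSOCIATE_EX signature defined below):
; !insertmacro APP_ASSOCIATE_EX "txt" "myapp.textfile" "myapp text file" "$INSTDIR\myapp.exe,0" \
;   "edit" "edit" "0" "Edit with myapp" "$INSTDIR\myapp.exe /edit $\"%1$\""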
;
; And finally: To remove the association from the registry use the APP_UNASSOCIATE
; macro. Here is another example just to wrap it up:
; !insertmacro APP_UNASSOCIATE "txt" "myapp.textfile"
;
; |> Note <|
; When defining your file class string always use the short form of your application title
; then a period (dot) and the type of file. This keeps the file class sort of unique.
; Examples:
; Winamp.Playlist
; NSIS.Script
; Photoshop.JPEGFile
;
; |> Tech info <|
; The registry key layout for a file association is:
; HKEY_CLASSES_ROOT
; <applicationID> = <"description">
; shell
; <verb> = <"menu-item text">
; command = <"command string">
;
!macro APP_ASSOCIATE EXT FILECLASS DESCRIPTION ICON COMMANDTEXT COMMAND
; Backup the previously associated file class
ReadRegStr $R0 HKCR ".${EXT}" ""
WriteRegStr HKCR ".${EXT}" "${FILECLASS}_backup" "$R0"
WriteRegStr HKCR ".${EXT}" "" "${FILECLASS}"
WriteRegStr HKCR "${FILECLASS}" "" `${DESCRIPTION}`
WriteRegStr HKCR "${FILECLASS}\DefaultIcon" "" `${ICON}`
WriteRegStr HKCR "${FILECLASS}\shell" "" "open"
WriteRegStr HKCR "${FILECLASS}\shell\open" "" `${COMMANDTEXT}`
WriteRegStr HKCR "${FILECLASS}\shell\open\command" "" `${COMMAND}`
!macroend
!macro APP_ASSOCIATE_EX EXT FILECLASS DESCRIPTION ICON VERB DEFAULTVERB SHELLNEW COMMANDTEXT COMMAND
; Backup the previously associated file class
ReadRegStr $R0 HKCR ".${EXT}" ""
WriteRegStr HKCR ".${EXT}" "${FILECLASS}_backup" "$R0"
WriteRegStr HKCR ".${EXT}" "" "${FILECLASS}"
StrCmp "${SHELLNEW}" "0" +2
WriteRegStr HKCR ".${EXT}\ShellNew" "NullFile" ""
WriteRegStr HKCR "${FILECLASS}" "" `${DESCRIPTION}`
WriteRegStr HKCR "${FILECLASS}\DefaultIcon" "" `${ICON}`
WriteRegStr HKCR "${FILECLASS}\shell" "" `${DEFAULTVERB}`
WriteRegStr HKCR "${FILECLASS}\shell\${VERB}" "" `${COMMANDTEXT}`
WriteRegStr HKCR "${FILECLASS}\shell\${VERB}\command" "" `${COMMAND}`
!macroend
!macro APP_ASSOCIATE_ADDVERB FILECLASS VERB COMMANDTEXT COMMAND
WriteRegStr HKCR "${FILECLASS}\shell\${VERB}" "" `${COMMANDTEXT}`
WriteRegStr HKCR "${FILECLASS}\shell\${VERB}\command" "" `${COMMAND}`
!macroend
!macro APP_ASSOCIATE_REMOVEVERB FILECLASS VERB
DeleteRegKey HKCR `${FILECLASS}\shell\${VERB}`
!macroend
!macro APP_UNASSOCIATE EXT FILECLASS
; Backup the previously associated file class
ReadRegStr $R0 HKCR ".${EXT}" `${FILECLASS}_backup`
WriteRegStr HKCR ".${EXT}" "" "$R0"
DeleteRegKey HKCR `${FILECLASS}`
!macroend
!macro APP_ASSOCIATE_GETFILECLASS OUTPUT EXT
ReadRegStr ${OUTPUT} HKCR ".${EXT}" ""
!macroend
; !defines for use with SHChangeNotify
!ifdef SHCNE_ASSOCCHANGED
!undef SHCNE_ASSOCCHANGED
!endif
!define SHCNE_ASSOCCHANGED 0x08000000
!ifdef SHCNF_FLUSH
!undef SHCNF_FLUSH
!endif
!define SHCNF_FLUSH 0x1000
!macro UPDATEFILEASSOC
; Using the system.dll plugin to call the SHChangeNotify Win32 API function so we
; can update the shell.
System::Call "shell32::SHChangeNotify(i,i,i,i) (${SHCNE_ASSOCCHANGED}, ${SHCNF_FLUSH}, 0, 0)"
!macroend
;EOF

@@ -0,0 +1,4 @@
# this is not supported on stable Rust, and is ignored by the Bazel rules; it is only
# useful for manual invocation with 'cargo +nightly fmt'
imports_granularity = "Crate"
group_imports = "StdExternalCrate"

qt/bundle/win/src/main.rs (new file, 222 lines)

@@ -0,0 +1,222 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{
fs,
io::prelude::*,
path::{Component, Path, PathBuf, Prefix},
process::Command,
};
use anyhow::{bail, Context, Result};
use slog::*;
use tugger_windows_codesign::{CodeSigningCertificate, SigntoolSign, SystemStore, TimestampServer};
use walkdir::WalkDir;
fn main() -> anyhow::Result<()> {
let plain = slog_term::PlainSyncDecorator::new(std::io::stdout());
let logger = Logger::root(slog_term::FullFormat::new(plain).build().fuse(), o!());
let args: Vec<_> = std::env::args().collect();
let build_folder = PathBuf::from(args.get(1).context("build folder")?);
let bazel_external = PathBuf::from(args.get(2).context("bazel external")?);
// bundle/build.py folder
let build_py_folder = PathBuf::from(args.get(3).context("build_py_folder")?);
let version = args.get(4).context("version")?;
let std_folder = build_folder.join("std");
let alt_folder = build_folder.join("alt");
let folders = &[&std_folder, &alt_folder];
for folder in folders {
fs::copy(
build_py_folder.join("win").join("anki-console.bat"),
folder.join("anki-console.bat"),
)
.context("anki-console")?;
}
println!("--- Copy in audio");
copy_in_audio(&std_folder, &bazel_external)?;
copy_in_audio(&alt_folder, &bazel_external)?;
println!("--- Build uninstaller");
build_installer(&std_folder, &build_folder, version, true).context("uninstaller")?;
// sign the anki.exe and uninstaller.exe in std, then copy into alt
println!("--- Sign binaries");
codesign(
&logger,
&[
&std_folder.join("anki.exe"),
&std_folder.join("uninstall.exe"),
],
)?;
for fname in &["anki.exe", "uninstall.exe"] {
fs::copy(std_folder.join(fname), alt_folder.join(fname))
.with_context(|| format!("copy {fname}"))?;
}
println!("--- Build manifest");
for folder in folders {
build_manifest(folder).context("manifest")?;
}
let mut installer_paths = vec![];
for (folder, variant) in folders.iter().zip(&["qt6", "qt5"]) {
println!(
"--- Build installer for {}",
folder.file_name().unwrap().to_str().unwrap()
);
build_installer(folder, &build_folder, version, false)?;
let installer_filename = format!("anki-{version}-windows-{variant}.exe");
let installer_path = build_folder
.join("..")
.join("dist")
.join(installer_filename);
fs::rename(build_folder.join("anki-setup.exe"), &installer_path)
.context("rename installer")?;
installer_paths.push(installer_path);
}
println!("--- Sign installers");
codesign(&logger, &installer_paths)?;
Ok(())
}
fn build_installer(
variant_folder: &Path,
build_folder: &Path,
version: &str,
uninstaller: bool,
) -> Result<()> {
let rendered_nsi = include_str!("../anki.template.nsi")
.replace("@@SRC@@", variant_folder.to_str().unwrap())
.replace("@@VERSION@@", version);
let rendered_nsi_path = build_folder.join("anki.nsi");
fs::write(&rendered_nsi_path, rendered_nsi).context("anki.nsi")?;
fs::write(
build_folder.join("fileassoc.nsh"),
include_str!("../fileassoc.nsh"),
)?;
let mut cmd = Command::new("c:/program files (x86)/nsis/makensis.exe");
cmd.arg("-V3");
if uninstaller {
cmd.arg("-DWRITE_UNINSTALLER");
};
if option_env!("NO_COMPRESS").is_some() {
cmd.arg("-DNO_COMPRESS");
}
cmd.arg(rendered_nsi_path);
let status = cmd.status()?;
if !status.success() {
bail!("makensis failed");
}
Ok(())
}
/// Copy everything at the provided path into the bundle dir.
/// Excludes standard Bazel repo files.
fn extend_app_contents(source: &Path, bundle_dir: &Path) -> Result<()> {
let status = Command::new("rsync")
.arg("-a")
.args(["--exclude", "BUILD.bazel", "--exclude", "WORKSPACE"])
.arg(format!("{}/", path_for_rsync(source, true)?))
.arg(format!("{}/", path_for_rsync(bundle_dir, true)?))
.status()?;
if !status.success() {
bail!("error syncing {source:?}");
}
Ok(())
}
/// Munge path into a format rsync expects on Windows.
fn path_for_rsync(path: &Path, trailing_slash: bool) -> Result<String> {
let mut components = path.components();
let mut drive = None;
if let Some(Component::Prefix(prefix)) = components.next() {
if let Prefix::Disk(letter) = prefix.kind() {
drive = Some(char::from(letter));
}
};
let drive = drive.context("missing drive letter")?;
let remaining_path: PathBuf = components.collect();
Ok(format!(
"/{}{}{}",
drive,
remaining_path
.to_str()
.context("remaining_path")?
.replace("\\", "/"),
if trailing_slash { "/" } else { "" }
))
}
fn copy_in_audio(bundle_dir: &Path, bazel_external: &Path) -> Result<()> {
extend_app_contents(&bazel_external.join("audio_win_amd64"), bundle_dir)
}
fn codesign(logger: &Logger, paths: &[impl AsRef<Path>]) -> Result<()> {
if option_env!("ANKI_CODESIGN").is_none() {
return Ok(());
}
let cert = CodeSigningCertificate::Sha1Thumbprint(
SystemStore::My,
"60abdb9cb52b7dc13550e8838486a00e693770d9".into(),
);
let mut sign = SigntoolSign::new(cert);
sign.file_digest_algorithm("sha256")
.timestamp_server(TimestampServer::Rfc3161(
"http://time.certum.pl".into(),
"sha256".into(),
))
.verbose();
paths.iter().for_each(|path| {
sign.sign_file(path);
});
sign.run(logger)
}
// FIXME: check whether uninstall.exe is required or not
fn build_manifest(base_path: &Path) -> Result<()> {
let mut buf = vec![];
for entry in WalkDir::new(base_path)
.min_depth(1)
.sort_by_file_name()
.into_iter()
{
let entry = entry?;
let path = entry.path();
let relative_path = path.strip_prefix(base_path)?;
write!(
&mut buf,
"{}\r\n",
relative_path.to_str().context("relative_path utf8")?
)?;
}
fs::write(base_path.join("anki.install-manifest"), buf)?;
Ok(())
}
#[cfg(test)]
mod test {
#[allow(unused_imports)]
use super::*;
#[test]
#[cfg(windows)]
fn test_path_for_rsync() -> Result<()> {
assert_eq!(
path_for_rsync(Path::new("c:\\foo\\bar"), false)?,
"/C/foo/bar"
);
assert_eq!(
path_for_rsync(Path::new("c:\\foo\\bar"), true)?,
"/C/foo/bar/"
);
Ok(())
}
}

@@ -1,21 +0,0 @@
:: ensure wheels are built and set up Rust env
pushd ..\..
call scripts\build || exit /b
call scripts\cargo-env
set ROOT=%CD%
popd
:: ensure venv exists
set OUTPUT_ROOT=%ROOT%/bazel-pkg
set VENV=%OUTPUT_ROOT%/venv
if not exist %VENV% (
mkdir %OUTPUT_ROOT%
pushd %ROOT%
call scripts\python -m venv %VENV% || exit /b
popd
)
:: run the rest of the build in Python
FOR /F "tokens=*" %%g IN ('call ..\..\bazel.bat info output_base --ui_event_filters=-INFO') do (SET BAZEL_EXTERNAL=%%g/external)
call ..\..\bazel.bat query @pyqt515//:*
%VENV%\scripts\python build.py %ROOT% %BAZEL_EXTERNAL% || exit /b

@@ -1,264 +0,0 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
from __future__ import annotations
import glob
import os
import platform
import re
import shutil
import subprocess
import sys
from pathlib import Path
is_win = sys.platform == "win32"
workspace = Path(sys.argv[1])
output_root = workspace / "bazel-pkg"
dist_folder = output_root / "dist"
venv = output_root / "venv"
cargo_target = output_root / "target"
bazel_external = Path(sys.argv[2])
artifacts = output_root / "artifacts"
pyo3_config = output_root / "pyo3-build-config-file.txt"
if is_win:
python_bin_folder = venv / "scripts"
os.environ["PATH"] += fr";{os.getenv('USERPROFILE')}\.cargo\bin"
cargo_features = "build-mode-prebuilt-artifacts"
else:
python_bin_folder = venv / "bin"
os.environ["PATH"] += f":{os.getenv('HOME')}/.cargo/bin"
cargo_features = (
"build-mode-prebuilt-artifacts global-allocator-jemalloc allocator-jemalloc"
)
os.environ["PYOXIDIZER_ARTIFACT_DIR"] = str(artifacts)
os.environ["PYOXIDIZER_CONFIG"] = str(Path(os.getcwd()) / "pyoxidizer.bzl")
os.environ["CARGO_TARGET_DIR"] = str(cargo_target)
# OS-specific things
pyqt5_folder_name = "pyqt515"
pyqt6_folder_path = bazel_external / "pyqt6" / "PyQt6"
is_lin = False
arm64_linux = False
if is_win:
os.environ["TARGET"] = "x86_64-pc-windows-msvc"
elif sys.platform.startswith("darwin"):
if platform.machine() == "arm64":
pyqt5_folder_name = None
os.environ["TARGET"] = "aarch64-apple-darwin"
os.environ["MACOSX_DEPLOYMENT_TARGET"] = "11.0"
else:
pyqt5_folder_name = "pyqt514"
os.environ["TARGET"] = "x86_64-apple-darwin"
os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.13"
else:
is_lin = True
if platform.machine() == "x86_64":
os.environ["TARGET"] = "x86_64-unknown-linux-gnu"
else:
os.environ["TARGET"] = "aarch64-unknown-linux-gnu"
pyqt5_folder_name = None
pyqt6_folder_path = None
arm64_linux = True
python = python_bin_folder / "python"
pip = python_bin_folder / "pip"
artifacts_in_build = (
output_root
/ "build"
/ os.getenv("TARGET")
/ "release"
/ "resources"
/ "extra_files"
)
def build_pyoxidizer():
subprocess.run(
[
"cargo",
"install",
"--git",
"https://github.com/ankitects/PyOxidizer.git",
"--rev",
# when updating, make sure Cargo.toml updated too
"eb26dd7cd1290de6503869f3d719eabcec45e139",
"pyoxidizer",
],
check=True,
)
def install_wheels_into_venv():
# Pip's handling of hashes is somewhat broken. It spots the hashes in the constraints
# file and forces all files to have a hash. We can manually hash our generated wheels
# and pass them in with hashes, but it still breaks, because the 'protobuf>=3.17'
# specifier in the pylib wheel is not allowed. Nevermind that a specific version is
# included in the constraints file we pass along! To get things working, we're
# forced to strip the hashes out before installing. This should be safe, as the files
# have already been validated as part of the build process.
constraints = output_root / "deps_without_hashes.txt"
with open(workspace / "python" / "requirements.txt") as f:
buf = f.read()
with open(constraints, "w") as f:
extracted = re.findall("^(\S+==\S+) ", buf, flags=re.M)
f.write("\n".join(extracted))
# install wheels and upgrade any deps
wheels = glob.glob(str(workspace / "bazel-dist" / "*.whl"))
subprocess.run(
[pip, "install", "--upgrade", "-c", constraints, *wheels], check=True
)
# always reinstall our wheels
subprocess.run(
[pip, "install", "--force-reinstall", "--no-deps", *wheels], check=True
)
# pypi protobuf lacks C extension on darwin-arm; use a locally built version
protobuf = Path.home() / "protobuf-3.19.1-cp39-cp39-macosx_11_0_arm64.whl"
if protobuf.exists():
subprocess.run(
[pip, "install", "--force-reinstall", "--no-deps", protobuf], check=True
)
if arm64_linux:
        # orjson doesn't get packaged correctly; remove it and we'll
        # copy one in later
subprocess.run([pip, "uninstall", "-y", "orjson"], check=True)
def build_artifacts():
if os.path.exists(artifacts):
shutil.rmtree(artifacts)
if os.path.exists(artifacts_in_build):
shutil.rmtree(artifacts_in_build)
subprocess.run(
[
"pyoxidizer",
"--system-rust",
"run-build-script",
"build.rs",
"--var",
"venv",
venv,
],
check=True,
env=os.environ
| dict(
CARGO_MANIFEST_DIR=".",
OUT_DIR=str(artifacts),
PROFILE="release",
PYO3_PYTHON=str(python),
),
)
existing_config = None
if os.path.exists(pyo3_config):
with open(pyo3_config) as f:
existing_config = f.read()
with open(artifacts / "pyo3-build-config-file.txt") as f:
new_config = f.read()
# avoid bumping mtime, which triggers crate recompile
if new_config != existing_config:
with open(pyo3_config, "w") as f:
f.write(new_config)
def build_pkg():
subprocess.run(
[
"cargo",
"build",
"--release",
"--no-default-features",
"--features",
cargo_features,
],
check=True,
env=os.environ | dict(PYO3_CONFIG_FILE=str(pyo3_config)),
)
def adj_path_for_windows_rsync(path: Path) -> str:
if not is_win:
return str(path)
path = path.absolute()
rest = str(path)[2:].replace("\\", "/")
return f"/{path.drive[0]}{rest}"
def merge_into_dist(output_folder: Path, pyqt_src_path: Path | None):
if not output_folder.exists():
output_folder.mkdir(parents=True)
# PyQt
if pyqt_src_path:
subprocess.run(
[
"rsync",
"-a",
"--delete",
"--exclude-from",
"qt.exclude",
adj_path_for_windows_rsync(pyqt_src_path),
adj_path_for_windows_rsync(output_folder / "lib") + "/",
],
check=True,
)
# Executable and other resources
resources = [
adj_path_for_windows_rsync(
cargo_target / "release" / ("anki.exe" if is_win else "anki")
),
adj_path_for_windows_rsync(artifacts_in_build) + "/",
]
if is_win:
resources.append(adj_path_for_windows_rsync(Path("win")) + "/")
elif is_lin:
resources.append("lin/")
subprocess.run(
[
"rsync",
"-a",
"--delete",
"--exclude",
"PyQt6",
"--exclude",
"PyQt5",
*resources,
adj_path_for_windows_rsync(output_folder) + "/",
],
check=True,
)
# Linux ARM workarounds
if arm64_linux:
        # the bundled orjson ends up broken; copy a working one in from ~/orjson
subprocess.run(
[
"rsync",
"-a",
"--delete",
os.path.expanduser("~/orjson"),
output_folder / "lib/",
],
check=True,
)
# Ensure all files are world-readable
if not is_win:
subprocess.run(["chmod", "-R", "a+r", output_folder])
build_pyoxidizer()
install_wheels_into_venv()
build_artifacts()
build_pkg()
merge_into_dist(dist_folder / "std", pyqt6_folder_path)
if pyqt5_folder_name:
merge_into_dist(dist_folder / "alt", bazel_external / pyqt5_folder_name / "PyQt5")

@@ -1,29 +0,0 @@
#!/bin/bash
set -e
cd $(dirname $0)
ROOT=$(pwd)/../..
OUTPUT_ROOT=$ROOT/bazel-pkg
VENV=$OUTPUT_ROOT/venv
BAZEL_EXTERNAL=$(bazel info output_base --ui_event_filters=-INFO)/external
# ensure the wheels are built
(cd $ROOT && ./scripts/build)
# ensure venv exists
test -d $VENV || (
mkdir -p $OUTPUT_ROOT
(cd $ROOT && ./scripts/python -m venv $VENV)
)
# run the rest of the build in Python
. $ROOT/scripts/cargo-env
if [[ "$OSTYPE" == "darwin"* ]]; then
if [ $(uname -m) != "arm64" ]; then
bazel query @pyqt514//:* > /dev/null
fi
else
bazel query @pyqt515//:* > /dev/null
fi
$VENV/bin/python build.py $ROOT $BAZEL_EXTERNAL

@@ -1,26 +0,0 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import os
import sys
from pathlib import Path
def build_manifest(top: Path) -> None:
manifest = []
for root, dirnames, fnames in os.walk(top, topdown=True):
relroot = root[len(str(top)) + 1 :]
# if not top level, add folder
if relroot:
manifest.append(relroot)
# then the files
for fname in fnames:
path = os.path.join(relroot, fname)
manifest.append(path)
with open(top / "anki.install-manifest", "w") as file:
file.write("\n".join(manifest) + "\n")
if __name__ == "__main__":
build_manifest(Path(sys.argv[1]))

@@ -33,7 +33,7 @@ if __name__ == "__main__":
"aqt",
"tests",
"tools",
"package",
"bundle",
]
+ args,
check=False,
@@ -51,7 +51,7 @@ if __name__ == "__main__":
"aqt",
"tests",
"tools",
"package",
"bundle",
]
+ args,
check=False,

@@ -22,6 +22,7 @@ for line in open(input_scss):
and not ":root" in line
and "Copyright" not in line
and "License" not in line
and "color-scheme" not in line
):
print("failed to match", line)
continue

@@ -151,3 +151,98 @@ exports_files(["l10n.toml"])
],
sha256 = qtftl_i18n_zip_csum,
)
# binary bundle
################
maybe(
http_archive,
name = "pyoxidizer",
sha256 = "9f7951473d88c7989dc80199146f82020226a3b2425474fd33b6bcbd8fdd1b1c",
urls = [
# when changing this, the commit hash needs to be updated in qt/bundle/Cargo.toml
"https://github.com/ankitects/PyOxidizer/archive/refs/tags/anki-2021-12-08.tar.gz",
],
strip_prefix = "PyOxidizer-anki-2021-12-08",
build_file_content = " ",
)
maybe(
http_archive,
name = "bundle_extras_linux_amd64",
build_file_content = " ",
urls = [
"https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/linux-amd64.tar.gz",
],
sha256 = "cbfb41fb750ae19b381f8137bd307e1167fdc68420052977f6e1887537a131b0",
)
maybe(
http_archive,
name = "audio_win_amd64",
build_file_content = " ",
urls = [
"https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/audio-win-amd64.tar.gz",
],
sha256 = "0815a601baba05e03bc36b568cdc2332b1cf4aa17125fc33c69de125f8dd687f",
)
maybe(
http_archive,
name = "protobuf_wheel_mac_arm64",
build_file_content = " ",
urls = [
"https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/protobuf-wheel-mac-arm64.tar",
],
sha256 = "401d1cd6d949af463b3945f0d5dc887185b27fa5478cb6847bf94f680ea797b4",
)
maybe(
http_archive,
name = "audio_mac_amd64",
build_file_content = " ",
urls = [
"https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/audio-mac-amd64.tar.gz",
],
sha256 = "d9310cbd6bed09d6d36deb8b7611bffbd161628512b1bf8d7becfdf78b5cd1dd",
)
maybe(
http_archive,
name = "audio_mac_arm64",
build_file_content = " ",
urls = [
"https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/audio-mac-arm64.tar.gz",
],
sha256 = "c30a772132a16fa79d9a1e60f5dce2f91fe8077e2709a8f39ef499d49f6a4b0e",
)
maybe(
http_archive,
name = "pyqt6.2_mac_bundle_amd64",
build_file_content = " ",
urls = [
"https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/pyqt6.2-mac-amd64.tar.gz",
],
sha256 = "c7bf899eee33fcb3b5848f5d3e5fc390012efc05c2308e4349b7bbd5939c85f0",
)
maybe(
http_archive,
name = "pyqt6.2_mac_bundle_arm64",
build_file_content = " ",
urls = [
"https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/pyqt6.2-mac-arm64.tar.gz",
],
sha256 = "7a4b7d5bd65c83fd16cf7e56929183ef0d1d7bb67f9deea8f2482d7378e0ea02",
)
maybe(
http_archive,
name = "pyqt5.14_mac_bundle_amd64",
build_file_content = " ",
urls = [
"https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/pyqt5.14-mac-amd64.tar.gz",
],
sha256 = "474951bed79ddb9570ee4c5a6079041772551ea77e77171d9e33d6f5e7877ec1",
)

@@ -1,2 +1,7 @@
@echo off
pushd "%~dp0"
call tools\setup-env.bat
set PYTHONWARNINGS=default
call .\bazel.bat run %BUILDARGS% //qt:runanki -k -- %*
bazel run %BUILDARGS% //qt:runanki -k -- %* || exit /b 1
popd

@@ -1,12 +0,0 @@
#!/bin/bash
set -e
test -e WORKSPACE || (
echo "Run from project root"
exit 1
)
rm -rf bazel-dist
bazel build --config opt dist
tar xvf .bazel/bin/dist.tar

@@ -1,12 +0,0 @@
@echo off
if not exist WORKSPACE (
echo Run from project root
exit /b 1
)
rd /s /q bazel-dist
set BUILDARGS=-k -c opt dist --color=yes
call .\bazel build %BUILDARGS% || exit /b 1
tar xvf ..\bazel\anki\bin\dist.tar || exit /b 1

@@ -1,4 +0,0 @@
rem Run this from the repo root folder
FOR /F "tokens=*" %%g IN ('call bazel.bat info output_base --ui_event_filters=-INFO') do (SET BAZEL_EXTERNAL=%%g/external)
set PATH=%BAZEL_EXTERNAL%\rust_windows_x86_64\bin;%PATH%

@@ -1,28 +0,0 @@
# Building in Docker
This folder contains a script for building Anki inside a Docker container.
It works by creating an image with the required dependencies, and then runs the
build with the source folder mounted into the image. This will cause files to be
written into `bazel-\*` and `node_modules` in the source folder as the build proceeds.
The advantage of doing it this way is that most of the efficiency of building
outside Docker is retained - you can make minor changes and run the build again,
and only the changed parts need to be rebuilt.
If you're looking for a fully isolated build, [this other
approach](../../docs/docker/README.md) in the docs folder may suit you better. As
it also includes runtime dependencies, it may be a useful reference for libraries
you'll need to install before running Anki.
# Usage
Ensure Docker is installed on your machine, and your user has permission to connect
to Docker. Then run the following command from the root folder of this source repo:
```
$ scripts/docker/build.sh amd64
```
The resulting wheels will be written into bazel-dist. See
[Development](../docs/development.md) for information on how to install them.
If you're on an ARM Linux machine, replace amd64 with arm64.
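For example, on an ARM machine the invocation would be:
```
$ scripts/docker/build.sh arm64
```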

@@ -1,12 +0,0 @@
#!/bin/bash
set -e
rm -rf bazel-dist
bazel build -c opt dist --symlink_prefix=bazel-docker/links/ \
--experimental_no_product_name_out_symlink
tar xvf bazel-docker/links/bin/dist.tar
if [ "$PACKAGE" != "" ]; then
(cd qt/package && ./build.sh)
fi
bazel shutdown

@@ -1,26 +0,0 @@
#!/bin/bash
set -e
test -e WORKSPACE || (
echo "Run from project root"
exit 1
)
arch=$1
if [ "$arch" != "amd64" -a "$arch" != "arm64" ]; then
echo "usage: build [amd64|arm64]"
exit 1
fi
rm -rf bazel-dist
export DOCKER_BUILDKIT=1
docker build --tag ankibuild --file scripts/docker/Dockerfile.$arch \
--build-arg uid=$(id -u) --build-arg gid=$(id -g) \
scripts/docker
docker run --rm -it -e PACKAGE=$PACKAGE \
--mount type=bind,source="$(pwd)",target=/code \
ankibuild

@@ -1 +0,0 @@
call bazel run python --ui_event_filters=-INFO -- %*

@@ -1,8 +0,0 @@
#!/bin/bash
# note: 5.14 is not available on Windows for Python 3.9
set -e
export PYTHONWARNINGS=default
bazel run $BUILDARGS //qt:runanki_qt514 -- $*

@@ -1,6 +0,0 @@
#!/bin/bash
set -e
export PYTHONWARNINGS=default
bazel run $BUILDARGS //qt:runanki_qt515 -- $*

@@ -1,4 +0,0 @@
REM run this from the scripts folder, not from root
set PYTHONWARNINGS=default
call ..\bazel.bat run %BUILDARGS% //qt:runanki_qt515 -k -- %*

@@ -1,2 +0,0 @@
set BUILDARGS=-c opt
call .\run.bat %*

tools/bazel (new executable file, 20 lines)

@@ -0,0 +1,20 @@
#!/bin/bash
set -e
# When building under Rosetta, use a separate output root, so that repo rules don't
# need to be run again when switching between x86_64 and arm64 builds.
extra_args=""
if [[ $OSTYPE == 'darwin'* ]]; then
if [ $(uname -m) = x86_64 -a "$(sysctl -in sysctl.proc_translated)" = 1 ]; then
extra_args="--output_base=$HOME/.cache/anki-rosetta"
fi
fi
# Bazelisk will place the tools folder at the front of the path. This breaks
# genrule() invocations like //:buildinfo, as they call 'bazel run python', which
# fails as BAZEL_REAL is not passed to the child process. Work around it by removing
# the tools folder from the path.
export PATH=$(echo "$PATH" | sed 's@^.*/tools:@@')
exec $BAZEL_REAL $extra_args "$@"

Some files were not shown because too many files have changed in this diff.