Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 9 additions & 3 deletions .github/docker/ci/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ RUN apt-get update && \
bc kmod cpio flex bison libssl-dev libelf-dev \
binutils-aarch64-linux-gnu \
clang-format \
shellcheck \
git && \
rm -rf /var/lib/apt/lists/*

Expand All @@ -44,15 +45,20 @@ RUN mkdir -p "${ANDROID_HOME}/cmdline-tools" && \
chmod -R a+rx "${ANDROID_HOME}"

# ── Rust toolchain (for zygisk) ──────────────────────────────────────
# Pin both rustc and cargo-ndk versions so monthly image rebuilds don't
# silently drift. Bump together with local toolchain when needed; CI
# stays reproducible against an exact version.
ENV RUSTUP_HOME=/usr/local/rustup \
    CARGO_HOME=/usr/local/cargo \
    PATH=/usr/local/cargo/bin:$PATH \
    RUST_VERSION=1.95.0 \
    CARGO_NDK_VERSION=4.1.2

# Install the pinned toolchain non-interactively; --no-modify-path because
# PATH is already set via ENV above. cargo-ndk is pinned for the same
# reproducibility reason as RUST_VERSION.
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | \
    sh -s -- -y --default-toolchain "${RUST_VERSION}" --profile minimal --no-modify-path && \
    rustup target add aarch64-linux-android && \
    rustup component add rustfmt clippy && \
    cargo install cargo-ndk --version "${CARGO_NDK_VERSION}" --locked && \
    chmod -R a+w /usr/local/rustup /usr/local/cargo

# ── Android NDK (for zygisk) ─────────────────────────────────────────
Expand Down
46 changes: 39 additions & 7 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -86,15 +86,44 @@ jobs:
run: |
cd zygisk && cargo fmt --check
cd ../lsposed/native && cargo fmt --check
# `--tests` so generated test modules are also linted
# (catches `bool_assert_comparison`-style regressions in
# codegen output).
- name: clippy (zygisk)
  run: cd zygisk && cargo ndk -t arm64-v8a clippy --tests -- -D warnings
- name: clippy (lsposed native)
  run: cd lsposed/native && cargo ndk -t arm64-v8a clippy --tests -- -D warnings
- name: cargo test (zygisk)
  run: cd zygisk && cargo test
- name: cargo test (lsposed native)
  run: cd lsposed/native && cargo test

# Shell — module-side scripts (Magisk/KSU) and host-side dev tooling.
# shellcheck is preinstalled in the CI image (apt). The list is
# explicit so we don't accidentally pick up vendored .sh from
# zygisk/third_party.
- name: shellcheck
# SC2034: `SKIPUNZIP` and the `for i in $(seq …)` counters look unused
#         to shellcheck — Magisk reads SKIPUNZIP externally, and the
#         counters are loop iterators we never read by name.
# SC3043: `local` is "not POSIX" but Android's /system/bin/sh
# (mksh on Pixel) supports it, and our module-side
# scripts always run there.
# The inline `apt-get install` is a one-job fallback for the
# window between this PR landing and the next ci-image rebuild
# (the Dockerfile bake also adds shellcheck). After the rebuild
# this no-ops in <1s.
run: |
if ! command -v shellcheck >/dev/null; then
apt-get update -qq && apt-get install -y --no-install-recommends shellcheck >/dev/null
fi
shellcheck -x -e SC2034,SC3043 \
kmod/module/customize.sh kmod/module/post-fs-data.sh kmod/module/service.sh \
zygisk/module/customize.sh zygisk/module/service.sh \
portshide/module/customize.sh portshide/module/service.sh \
portshide/module/uninstall.sh portshide/module/vpnhide_ports_apply.sh \
scripts/clean-device.sh scripts/update-json.sh

# C (kernel module)
- name: clang-format
run: clang-format --dry-run --Werror kmod/vpnhide_kmod.c
Expand Down Expand Up @@ -250,11 +279,14 @@ jobs:
-keyalg RSA -keysize 4096 -validity 365 \
-dname "CN=vpnhide-fork-ci, O=vpnhide, C=US"
fi
# Build keystore.properties via printf — `%s` emits each value
# verbatim, with no shell processing of $, backticks, backslashes,
# or `!` in the secret. A heredoc with an unquoted EOF delimiter
# runs $/backtick/backslash expansion over the template text, so
# printf with quoted arguments is the safer, more explicit form.
{
  printf 'password=%s\n' "$KEYSTORE_PASSWORD"
  printf 'keyAlias=%s\n' "$KEY_ALIAS"
  printf 'storeFile=%s\n' "$KEYSTORE_PATH"
} > "$GITHUB_WORKSPACE/lsposed/keystore.properties"

# Release tags get the full assembleRelease (R8/ProGuard, signed APK
# ready for the GitHub release). PRs and main pushes get assembleDebug
Expand Down
84 changes: 42 additions & 42 deletions lsposed/native/src/generated/iface_lists.rs
Original file line number Diff line number Diff line change
Expand Up @@ -110,47 +110,47 @@ mod tests {

/// Generated vectors for `matches_vpn` (emitted by codegen-interfaces.py).
/// Uses `assert!(x, msg)` / `assert!(!x, msg)` rather than
/// `assert_eq!(x, true/false, msg)` so `clippy::bool_assert_comparison`
/// stays quiet when this module is linted with `--tests`.
#[test]
fn generated_vectors() {
    assert!(matches_vpn(b"tun0"), "matches_vpn('tun0')");
    assert!(matches_vpn(b"tun"), "matches_vpn('tun')");
    assert!(matches_vpn(b"tun1234"), "matches_vpn('tun1234')");
    assert!(matches_vpn(b"tap0"), "matches_vpn('tap0')");
    assert!(matches_vpn(b"wg0"), "matches_vpn('wg0')");
    assert!(matches_vpn(b"wg-client"), "matches_vpn('wg-client')");
    assert!(matches_vpn(b"ppp0"), "matches_vpn('ppp0')");
    assert!(matches_vpn(b"ipsec0"), "matches_vpn('ipsec0')");
    assert!(matches_vpn(b"xfrm0"), "matches_vpn('xfrm0')");
    assert!(matches_vpn(b"utun3"), "matches_vpn('utun3')");
    assert!(matches_vpn(b"l2tp0"), "matches_vpn('l2tp0')");
    assert!(matches_vpn(b"gre0"), "matches_vpn('gre0')");
    assert!(matches_vpn(b"TUN0"), "matches_vpn('TUN0')");
    assert!(matches_vpn(b"Wg99"), "matches_vpn('Wg99')");
    assert!(matches_vpn(b"MyVPN"), "matches_vpn('MyVPN')");
    assert!(matches_vpn(b"custom_VPN_42"), "matches_vpn('custom_VPN_42')");
    assert!(matches_vpn(b"myvpn0"), "matches_vpn('myvpn0')");
    assert!(matches_vpn(b"vpn"), "matches_vpn('vpn')");
    assert!(matches_vpn(b"xvpn1"), "matches_vpn('xvpn1')");
    assert!(!matches_vpn(b"lo"), "matches_vpn('lo')");
    assert!(!matches_vpn(b"wlan0"), "matches_vpn('wlan0')");
    assert!(!matches_vpn(b"wlan"), "matches_vpn('wlan')");
    assert!(!matches_vpn(b"rmnet0"), "matches_vpn('rmnet0')");
    assert!(!matches_vpn(b"rmnet_data0"), "matches_vpn('rmnet_data0')");
    assert!(!matches_vpn(b"rmnet_ipa0"), "matches_vpn('rmnet_ipa0')");
    assert!(!matches_vpn(b"eth0"), "matches_vpn('eth0')");
    assert!(!matches_vpn(b"ccmni0"), "matches_vpn('ccmni0')");
    assert!(!matches_vpn(b"seth_lte8"), "matches_vpn('seth_lte8')");
    assert!(!matches_vpn(b"dummy0"), "matches_vpn('dummy0')");
    assert!(!matches_vpn(b"bnep0"), "matches_vpn('bnep0')");
    assert!(!matches_vpn(b"rndis0"), "matches_vpn('rndis0')");
    assert!(matches_vpn(b"if33"), "matches_vpn('if33')");
    assert!(matches_vpn(b"if0"), "matches_vpn('if0')");
    assert!(matches_vpn(b"if99"), "matches_vpn('if99')");
    assert!(!matches_vpn(b"ifb0"), "matches_vpn('ifb0')");
    assert!(!matches_vpn(b"ifb1"), "matches_vpn('ifb1')");
    assert!(!matches_vpn(b"if"), "matches_vpn('if')");
    assert!(!matches_vpn(b"if_inet6"), "matches_vpn('if_inet6')");
    assert!(!matches_vpn(b""), "matches_vpn('')");
    assert!(matches_vpn(b"tunl"), "matches_vpn('tunl')");
    assert!(!matches_vpn(b"atun0"), "matches_vpn('atun0')");
    assert!(matches_vpn(b"VPN"), "matches_vpn('VPN')");
}
}
16 changes: 13 additions & 3 deletions scripts/changelog_lib.py
Original file line number Diff line number Diff line change
Expand Up @@ -230,15 +230,25 @@ def rotate_fragments_into_history(
version: str,
) -> dict:
"""Promote the current fragment set into `history[0]` with the given
version. Returns the newly-released entry.

Does NOT delete the fragment files — caller is responsible, and must
do so AFTER persisting `data` to disk via `save_json`. The split
keeps the operation recoverable: if save_json fails the fragments
are still on disk and the next run rebuilds the same history entry.
Use `delete_fragment_files` for the cleanup step.
"""
released = {
"version": version,
"sections": fragments_as_sections(fragments),
}
history = data.setdefault("history", [])
history.insert(0, released)
return released


def delete_fragment_files(fragments: list[dict]) -> None:
    """Remove fragment files from disk.

    Call this only AFTER `save_json` has successfully persisted the
    rotated history built by `rotate_fragments_into_history`; that
    ordering keeps the operation recoverable on failure.
    """
    for fragment in fragments:
        # missing_ok: a retried run after a partially completed cleanup
        # must not crash on fragments that were already removed.
        fragment["path"].unlink(missing_ok=True)
8 changes: 6 additions & 2 deletions scripts/codegen-interfaces.py
Original file line number Diff line number Diff line change
Expand Up @@ -461,9 +461,13 @@ def emit_rust(rules: list[Rule], tests: list[TestVector]) -> str:
lines.append(" #[test]")
lines.append(" fn generated_vectors() {")
for t in tests:
expected = "true" if t.is_vpn else "false"
# `assert!(x, msg)` / `assert!(!x, msg)` instead of
# `assert_eq!(x, true/false, msg)` — clippy::bool_assert_comparison
# would otherwise fire on every generated row when contributors
# run `cargo clippy --tests`.
prefix = "" if t.is_vpn else "!"
lines.append(
f" assert_eq!(matches_vpn({rust_byte_lit(t.name)}), {expected}, "
f" assert!({prefix}matches_vpn({rust_byte_lit(t.name)}), "
f'"matches_vpn({t.name!r})");'
)
lines.append(" }")
Expand Down
19 changes: 17 additions & 2 deletions scripts/release.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
sys.path.insert(0, str(Path(__file__).resolve().parent))
from changelog_lib import ( # type: ignore[import-not-found]
REPO_ROOT,
delete_fragment_files,
load_fragments,
load_json,
rotate_fragments_into_history,
Expand All @@ -60,10 +61,21 @@ def parse_version(raw: str) -> tuple[str, int]:


def patch_file(path: Path, replacements: list[tuple[re.Pattern[str], str]]) -> None:
    """Apply each pattern → replacement exactly once to *path*.

    Hard-fails (SystemExit) if any pattern doesn't match. Silently
    leaving a stale version in some file because the format drifted from
    what the regex expects is exactly the failure mode we want to catch
    loudly. The file is rewritten only when something actually changed.
    """
    text = path.read_text(encoding="utf-8")
    new_text = text
    for pattern, replacement in replacements:
        # subn reports the match count, so a drifted file format fails
        # loudly instead of silently shipping a stale version string.
        new_text, n = pattern.subn(replacement, new_text, count=1)
        if n == 0:
            raise SystemExit(
                f"error: pattern {pattern.pattern!r} did not match in {path}. "
                f"File format probably changed — update release.py."
            )
    if new_text != text:
        path.write_text(new_text, encoding="utf-8")

Expand Down Expand Up @@ -139,10 +151,13 @@ def main() -> int:
console.print(f"[red]missing:[/red] {f.relative_to(REPO_ROOT)}")
return 1

# Changelog: rotate fragments into history, persist, then delete the
# fragment files. Order matters — if save_json/write_md fails, the
# fragments are still on disk and the run can be retried safely.
rotate_fragments_into_history(data, fragments, version)
save_json(data)
write_md(data)
delete_fragment_files(fragments)
console.print(
f" [green]✓[/green] changelog: {len(fragments)} fragment(s) → history[0] as v{version}",
)
Expand Down
Loading