diff --git a/.envrc b/.envrc new file mode 100644 index 00000000..621bb5f9 --- /dev/null +++ b/.envrc @@ -0,0 +1,5 @@ +# export PKGX_DIR="$PWD/artifacts/$(uname)/$(uname -m)" +# export PKGX_PANTRY_DIR="$PWD/builds/pantry" +PATH_add bin +export DOCKER_DEFAULT_PLATFORM=linux/amd64 +export AWS_PROFILE=tea diff --git a/.github/Dockerfile b/.github/Dockerfile new file mode 100644 index 00000000..c099d9dd --- /dev/null +++ b/.github/Dockerfile @@ -0,0 +1,21 @@ +FROM debian:buster-slim AS stage1 +RUN apt-get update && apt-get install -y curl +RUN curl https://pkgx.sh/$(uname)/$(uname -m).tgz | tar xz -C /usr/local/bin +ENV PKGX_DIST_URL="https://dist.pkgx.dev/v2" +RUN pkgx +llvm.org +jq +make +patchelf +deno^2 >/dev/null +COPY ./brewkit /work/brewkit +COPY ./deno.jsonc /work/deno.jsonc +COPY ./deno.lock /work/deno.lock +RUN cd /work && find ./brewkit -name \*.ts | xargs pkgx deno cache + +FROM debian:buster-slim AS stage2 +COPY --from=stage1 /usr/local/bin/pkgx /usr/local/bin/pkgx +COPY --from=stage1 /root/.pkgx /root/.pkgx +COPY --from=stage1 /root/.cache/deno /root/.cache/deno + +# libc6-dev: platform specific c-headers that LLVM doesn’t provide +# libgcc-8-dev: provides the c runtime `crtbeginS.o` +# libstdc++-8-dev: or we can’t build c++ pkgs +RUN apt-get update && apt-get install --yes libc6-dev libgcc-8-dev libstdc++-8-dev + +ENV PKGX_DIST_URL="https://dist.pkgx.dev/v2" diff --git a/.github/actions/plan/action.yml b/.github/actions/plan/action.yml new file mode 100644 index 00000000..9c6cf168 --- /dev/null +++ b/.github/actions/plan/action.yml @@ -0,0 +1,48 @@ +runs: + using: composite + steps: + - id: changed-files + uses: tj-actions/changed-files@v45 + with: + files: | + projects/**/build.ts + projects/**/test.ts + projects/**/versions.ts + + - name: crunch diff + id: cruncher + run: | + PROJECTS=() + declare -A SEEN + for x in ${{ steps.changed-files.outputs.all_changed_files }}; do + x="$(dirname "${x#projects/}")" + if [ -z "${SEEN[$x]}" ]; then + PROJECTS+=("$x") + SEEN["$x"]=1 + fi + done + echo "projects=${PROJECTS[@]}" >> "$GITHUB_OUTPUT" + + shell: bash + if: ${{ ! 
inputs.pkgs }} + + - name: compute matrix + id: computer + run: | + ${GITHUB_ACTION_PATH}/compute-matrix.js ${{ inputs.pkgs || steps.cruncher.outputs.projects || inputs.default }} + shell: bash + if: ${{ inputs.compute-matrix }} + +inputs: + compute-matrix: + default: true + pkgs: + required: false + default: + required: false + +outputs: + projects: + value: ${{ steps.cruncher.outputs.projects }} + matrix: + value: ${{ steps.computer.outputs.matrix }} diff --git a/.github/actions/plan/compute-matrix.js b/.github/actions/plan/compute-matrix.js new file mode 100755 index 00000000..2abf9416 --- /dev/null +++ b/.github/actions/plan/compute-matrix.js @@ -0,0 +1,63 @@ +#!/usr/bin/env node + +const fs = require("fs"); + +(async function () { + const rvv = []; + for (const arg of process.argv.slice(2)) { + const config = await get_config(); + + for (const platform of config.platforms) { + const rv = {}; + rv["platform"] = get_matrix(platform); + rv["pkg"] = arg; + rvv.push(rv); + } + } + + const json = JSON.stringify(rvv); + fs.appendFileSync(process.env.GITHUB_OUTPUT, `matrix=${json}\n`); +})(); + +/////////////////////////////////////////////////////////////////////// + +async function get_config() { + return { platforms: ["linux/x86-64", "darwin/aarch64", "windows/x86-64"] }; +} + +function get_matrix(platform) { + const name = platform.replace("/", "+"); + switch (platform) { + case "darwin/aarch64": + return { + os: "macos-latest", + name, + tinyname: "²", + }; + case "darwin/x86-64": + return { + os: ["self-hosted", "macOS", "X64"], + name, + tinyname: "x64", + }; + case "linux/x86-64": + return { + os: "ubuntu-latest", + name, + container: "ghcr.io/pkgxdev/bldbot", + tinyname: "*nix64", + }; + case "linux/aarch64": + return { + os: ["self-hosted", "linux", "ARM64"], + name, + tinyname: "*nix·ARM64", + }; + case "windows/x86-64": + return { + os: "windows-latest", + name, + tinyname: "win64", + }; + } +} diff --git a/.github/actions/setup/action.yml b/.github/actions/setup/action.yml new file mode 100644 index 00000000..0edaf345 --- /dev/null +++ b/.github/actions/setup/action.yml @@ -0,0 +1,41 @@ +runs: + using: composite + steps: + - uses: denoland/setup-deno@v2 + if: runner.os == 'Windows' + + - run: | + "$PWD\\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append + + git fetch origin pkgx + git checkout FETCH_HEAD -- pkgx.exe + Move-Item -Path pkgx.exe -Destination bin + + Get-ChildItem -Path brewkit -Recurse -Filter *.ts | ForEach-Object { deno cache $_.FullName } + + choco install make -y + shell: pwsh + if: runner.os == 'Windows' + + - run: | + echo "$PWD/bin" >> $GITHUB_PATH + + case $(uname) in + Linux) + # $HOME is different in the docker image vs. 
github actions + ln -s /root/.pkgx $HOME + ln -s /root/.cache $HOME + ./bin/pkg-convert + ;; + Darwin) + curl https://pkgx.sh/$(uname)/$(uname -m).tgz | sudo tar xz -C /usr/local/bin + find brewkit -name \*.ts | xargs pkgx deno^2 cache + ./bin/pkg-convert + ;; + *) + make -f ./bin/pkg-convert + ;; + esac + + + shell: bash diff --git a/.github/scripts/generate-versions.ts b/.github/scripts/generate-versions.ts new file mode 100755 index 00000000..ed6a7dd5 --- /dev/null +++ b/.github/scripts/generate-versions.ts @@ -0,0 +1,34 @@ +#!/usr/bin/env -S pkgx deno run --quiet --allow-net --allow-read --allow-env --allow-sys +import { ListObjectsV2Command, S3Client } from "npm:@aws-sdk/client-s3@3.743.0"; +import SemVer, { compare } from "https://deno.land/x/libpkgx@v0.20.3/src/utils/semver.ts"; +import { basename } from "jsr:@std/path@^1.0.8"; + +const s3 = new S3Client(); +const Bucket = "dist.tea.xyz"; +const Prefix = Deno.args[0]; +const Key = `${Prefix}/versions.txt`; + +async function listObjects() { + const cmd = new ListObjectsV2Command({ Bucket, Prefix }); + const rsp = await s3.send(cmd); + + if (!rsp.Contents || rsp.Contents.length === 0) { + return []; + } + + const rv = new Set(); + for (const obj of rsp.Contents) { + const base = basename(obj!.Key); + if (/^v\d+/.test(base)) { + rv.add(base.replace(/\.tar\.[gx]z$/, "")); + } + } + return [...rv]; +} + +const versions = (await listObjects()).map((x) => new SemVer(x)).sort(compare) + .join("\n"); + +console.log(versions); + +//TODO upload, I tried but Deno and the AWS SDK don’t mix. Maybe bun’ll work? diff --git a/.github/workflows/cd.docker.yml b/.github/workflows/cd.docker.yml new file mode 100644 index 00000000..a7d69c14 --- /dev/null +++ b/.github/workflows/cd.docker.yml @@ -0,0 +1,59 @@ +name: cd·docker +run-name: cd·docker + +on: + push: + branches: + - main + paths: + - .github/workflows/cd.docker.yml + - .github/Dockerfile + - brewkit/**/*.ts + workflow_dispatch: + +concurrency: + group: docker + cancel-in-progress: true + +permissions: + contents: read + packages: write + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: docker/auth + run: echo "${{ github.token }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin + + - uses: docker/metadata-action@v5 + with: + images: ghcr.io/${{ github.repository_owner }}/bldbot + + - uses: docker/setup-qemu-action@v3 + - uses: docker/setup-buildx-action@v3 + + # TODO --platform linux/amd64,linux/arm64 + - name: docker/buildx + run: | + docker buildx build \ + --push \ + --tag ghcr.io/${{ github.repository_owner }}/bldbot:latest \ + --platform linux/amd64 \ + --file .github/Dockerfile \ + . 
+ + prune: + needs: deploy + runs-on: ubuntu-latest + steps: + - uses: actions/delete-package-versions@v5 + with: + package-name: bldbot + package-type: container + min-versions-to-keep: 10 + # ^^ despite telling it to only delete untagged versions + # it deletes images attached to latest like a lolfactory + delete-only-untagged-versions: true diff --git a/.github/workflows/cd.pantry.tgz.yml b/.github/workflows/cd.pantry.tgz.yml new file mode 100644 index 00000000..6cc3ceb0 --- /dev/null +++ b/.github/workflows/cd.pantry.tgz.yml @@ -0,0 +1,38 @@ +name: cd·pantry.tgz +run-name: cd·pantry.tgz + +on: + push: + branches: main + paths: + - projects/**/* + - .github/workflows/cd.pantry.tgz.yml + +concurrency: + group: cd.pantry.tgz + cancel-in-progress: true + +jobs: + tarball: + runs-on: ubuntu-latest + steps: + - uses: aws-actions/configure-aws-credentials@v4 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + + - uses: actions/checkout@v4 + - uses: pkgxdev/setup@v3 + + - run: bin/pkg-convert + + - run: tar -C artifacts/pantry -czf pantry.tgz projects + + - run: aws s3 cp + ./pantry.tgz + s3://dist.tea.xyz/v2/pantry.tgz + + - run: aws cloudfront create-invalidation + --distribution-id EWF6XUK8J11DP + --paths /v2/pantry.tgz diff --git a/.github/workflows/cd.pkgs.yml b/.github/workflows/cd.pkgs.yml new file mode 100644 index 00000000..26ad4407 --- /dev/null +++ b/.github/workflows/cd.pkgs.yml @@ -0,0 +1,95 @@ +name: cd·pkgs +run-name: cd·pkgs ${{inputs.pkgs}} + +on: + push: + branches: + main + paths: + projects/**/build.ts + workflow_dispatch: + inputs: + pkgs: + required: true + +env: + PKGX_DIST_URL: https://dist.tea.xyz/v2 + +jobs: + ingest: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.computer.outputs.matrix }} + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/plan + id: computer + with: + pkgs: ${{ github.event.inputs.pkgs }} + + bottle: + needs: ingest + runs-on: ${{ matrix.platform.os }} + container: ${{ matrix.platform.container }} + name: build ${{ matrix.pkg }} ${{ matrix.platform.tinyname }} + strategy: + fail-fast: false + matrix: + include: ${{ fromJSON(needs.ingest.outputs.matrix) }} + steps: + - uses: actions/checkout@v4 + + - uses: ./.github/actions/setup + + - run: pkg-build ${{ matrix.pkg }} + env: + GITHUB_TOKEN: ${{ github.token }} + + - run: pkg-test + + - name: bottle + run: | + eval "$(pkgx mash pkgx/ensure +xz)" + + OLDWD="$PWD" + + if [ $(uname) = Darwin ]; then + # pkgx cannot untar into read only directories on macOS + # NOTE the true fix would be to make the dirs writable when + # untarring and then make them read only again after + find "$PREFIX" -type f | xargs chmod -w + else + chmod -R -w "$PREFIX" + fi + + cd $(mktemp -d) + mkdir -p $PROJECT + mv "$PREFIX" $PROJECT + + tar czf $OLDWD/v$VERSION.tar.gz * + tar cJf $OLDWD/v$VERSION.tar.xz * + + - uses: aws-actions/configure-aws-credentials@v4 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + + - name: s3 sync + run: | + eval "$(pkgx mash pkgx/ensure +aws)" + + aws s3 cp v$VERSION.tar.gz s3://dist.tea.xyz/v2/$PROJECT/$DIST_PLATFORM/v$VERSION.tar.gz + aws s3 cp v$VERSION.tar.xz s3://dist.tea.xyz/v2/$PROJECT/$DIST_PLATFORM/v$VERSION.tar.xz + .github/scripts/generate-versions.ts v2/$PROJECT/$DIST_PLATFORM > versions.txt + aws s3 cp versions.txt 
s3://dist.tea.xyz/v2/$PROJECT/$DIST_PLATFORM/versions.txt + + - name: cf invalidate + run: | + pkgx mash pkgx/ensure \ + aws cloudfront create-invalidation \ + --distribution-id EWF6XUK8J11DP \ + --paths \ + /v2/$PROJECT/$DIST_PLATFORM/versions.txt \ + /v2/$PROJECT/$DIST_PLATFORM/v$VERSION.tar.gz \ + /v2/$PROJECT/$DIST_PLATFORM/v$VERSION.tar.xz diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..95fccf64 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,53 @@ +name: ci + +on: + pull_request: + paths: + - projects/**/*.ts + - .github/workflows/ci.yml + +concurrency: + group: ci/${{ github.event.pull_request.head.ref }} + cancel-in-progress: true + +env: + PKGX_DIST_URL: https://dist.tea.xyz/v2 + +jobs: + plan: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.compute.outputs.matrix }} + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/plan + id: compute + with: + default: zlib.net + + validate: + needs: plan + runs-on: ${{ matrix.platform.os }} + container: ${{ matrix.platform.container }} + name: ${{ matrix.pkg }} (${{ matrix.platform.tinyname }}) + strategy: + fail-fast: false + matrix: + include: ${{ fromJSON(needs.plan.outputs.matrix) }} + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/setup + + - run: pkg-build ${{ matrix.pkg }} + env: + GITHUB_TOKEN: ${{ github.token }} + if: ${{ runner.os != 'Windows'}} + + - run: pkg-build ${{ matrix.pkg }} + env: + GITHUB_TOKEN: ${{ github.token }} + if: ${{ runner.os == 'Windows'}} + shell: pwsh + + - run: pkg-test + if: ${{ runner.os != 'Windows' }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..2a679469 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/artifacts +/srcs diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..cbac5697 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "deno.enable": true +} diff --git a/README.md b/README.md new file mode 100644 index 00000000..e32402e3 --- /dev/null +++ b/README.md @@ -0,0 +1,114 @@ +# `pantry^2` + +- Configuration is hardcoded to /etc +- We use vendor-built binaries where possible + - We assume the vendor knows how to build their product better than us + - Means issues are almost always upstream and not ours + - Notably excepting when the prebuilt binaries vendor libraries we provide +- Windows support from day 1 +- Relocatable without environment fixes for all base packages +- If we package a package manager and it provides a tool (in a unpainful way) + then pkgx will invoke that other tool to get the thing + - eg. `cargo-binstall`, `npx`, `uvx` etc. will be used instead of us + packaging them + - these other mechanisms are generally _preferred_ by the developer and + end-user, we'll just know how to get you the end-product with a consistent + CLI + - this means we can focus on ensuring the base is as good as possible + without distraction from a massive pkg list + - We will still index everything and show it at [pkgx.dev/pkgs] +- Minimizing deps on macOS + - we added deps because Linux needed them in v1 due to laziness and urgency +- No magic + - git no longer looks for `git-foo` if you type `git foo` + - etc. + - we would consider adding these back, but not as _part_ of the package + itself. The package itself should solely focus on our other goals and + otherwise be vanilla. 
+- `pkgx^1,^2` will use pantry^1, `pkgx^3` will use pantry^2
+- building with as minimal images as possible to ensure we are sure about
+  what goes into our packages
+- First-class `pkgm` support
+  - everything should install to `/usr/local` and just work
+- No weird handling for calver
+- no (or far fewer) pre-reqs on Linux
+  - some deps are unavoidable since they are heavily customized for the system
+    eg. librt
+- variable deps
+  - eg. a major version requires new deps
+  - eg. heaven forbid, a minor version changes the dep constraint
+- sources tarball locations can vary by version
+- program lists can vary by version
+- no deversioning of inner directories (for `pkgm`)
+- no support for dollar-prefixed moustaches in `package.yml`
+- independently versioned things must be independent
+  - generally we are already good at this
+- things without established versioning and/or programs are not valid to be
+  packaged. They go in `pkgo`
+- standardize pkgs
+  - use XDG and standard dirs on other platforms
+  - configure things that install things to install them to
+    `${INSTALL_ROOT:-$HOME/.local}` by default
+- more consistent project names
+  - no foo.github.io, just github.com etc.
+  - no strict adherence to homepages, it's more about namespacing
+
+[pkgx.dev/pkgs]: https://pkgx.dev/pkgs
+
+## Usage
+
+Assuming you are using `direnv`:
+
+```sh
+$ direnv allow
+
+$ pkg b python.org
+
+$ pkg t python.org
+
+$ pkg bt python.org
+# ^^ build then test
+
+$ ls bin
+# ^^ see what else we provide
+```
+
+> [!TIP]
+> We download the sources every time. If you are building something that has
+> large sources and want to debug faster then download the sources yourself
+> and add them to `./srcs`. The build infra will use them.
+
+## Wins
+
+- Python from 280MB to 78MB
+- Cleaner rpath handling across the board
+- Less env pollution by carefully using `pkgx` during builds rather than
+  importing dep-envs before builds, meaning more reliable builds with fewer
+  unexpected deps
+- Careful pruning of deps and build options for all base deps
+
+## FAQ
+
+### Why TypeScript?
+
+We prefer shell scripts for builds: there are a lot of CLI commands, and
+manipulating paths, environment variables, and pipes is far easier there. But
+shell is not cross-platform and we wanted to support Windows.
+
+`pantry^1` used YAML and mostly shell script to build and test. YAML just
+doesn’t cut it: you end up putting constructs in place to facilitate logic,
+and the complexity mounts and mounts. You may as well just use a real
+language.
+
+Also, we needed a cross-platform language to support Windows. Bash is not
+that.
+
+### Criteria for Inclusion
+
+- We require that packages are versioned.
+- We require that the project be licensed such that we are permitted to
+  redistribute it.
+- Stuff that is so new that its build instructions are likely to change a lot
+  in future may be rejected due to our inability to reliably maintain it.
+- Things that can have other general executors (eg. npx) should be run that
+  way using the `providers` system.
+- Things that do not respect reasonable release schedules may be rejected
+  (eg. we have seen packages release 10+ times a day, every day).
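The Usage section above builds projects defined under `projects/<name>/`, but no example project scripts appear in this diff. Purely as an illustration, a minimal `build.ts` might look like the sketch below: the option shape mirrors the `opts` object that `brewkit/build-template.ts` (later in this diff) passes to `build()`, while the project name and the configure/make steps are assumptions.

```ts
// Hypothetical projects/example.org/build.ts — a sketch, not a file from this PR.
// Option types mirror the opts constructed in brewkit/build-template.ts;
// the build commands themselves are illustrative assumptions.
import { Path, Prefix, run, SemVer } from "brewkit";

interface Options {
  prefix: Prefix;                           // staging prefix to install into
  version: SemVer & { marketing: string };  // e.g. 3.12.4 with marketing "3.12"
  deps: Record<string, { version: SemVer; prefix: Path }>;
  props: Path;                              // the projects/<name>/ directory
  PKGX_DIR: Path;
}

export default async function build({ prefix }: Options) {
  // classic autotools-style staging install (assumed for the example)
  run`./configure --prefix=${prefix}`;
  run`make install`;
}
```

`bin/pkg-build` below wires such a default export into its generated `run-build.ts` together with the shared `brewkit/build-template.ts`.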
diff --git a/bin/binspect b/bin/binspect new file mode 100755 index 00000000..a68fd902 --- /dev/null +++ b/bin/binspect @@ -0,0 +1,75 @@ +#!/usr/bin/env -S pkgx +gum +fd /bin/bash + +if [ -z "$1" ]; then + echo "usage: inspect {id|rpath} " >&2 + exit 2 +fi + +rpath() { + otool -l "$@" | + awk ' + /^[^ ]/ {f = 0} + $2 == "LC_RPATH" && $1 == "cmd" {f = 1} + f && gsub(/^ *path | \(offset [0-9]+\)$/, "") == 2' +} + +id() { + otool -D "$@" | awk 'NR > 1' +} + +install_names() { + if [[ "$1" == *.dylib ]]; then + otool -L "$@" | awk 'NR > 2' | awk '/\.dylib|\.framework/ {print $1}' + else + otool -L "$@" | awk '/\.dylib|\.framework/ {print $1}' + fi +} + +main() { + case $1 in + id) + shift + id "$@" + ;; + rp|rpath|rpaths) + shift + rpath "$@" + ;; + in|install_names) + shift + install_names "$@" + ;; + *) + if [[ $1 == *.dylib ]]; then + gum format '## id' + id "$@" + fi + gum format '## rpath' + rpath "$@" + gum format '## install names' + install_names "$@" + esac +} + +main_elf() { + gum format "## rpath" + pkgx patchelf --print-rpath "$@" + gum format "## needed" + pkgx patchelf --print-needed "$@" +} + +if [ -d $1 ]; then + fd -t f . "$1" | while IFS= read -r file; do + case "$(file $file)" in + *Mach-O*) + gum format "# \`$file\`" + main "$file" + ;; + *\ ELF\ *) + gum format "# \`$file\`" + main_elf "$file" + esac + done +else + main "$@" +fi diff --git a/bin/pkg b/bin/pkg new file mode 100755 index 00000000..d3f50fd7 --- /dev/null +++ b/bin/pkg @@ -0,0 +1,77 @@ +#!/bin/bash + +set -eo pipefail + +if [[ -z "$1" ]]; then + echo 'usage: pkg [btcd] ' >&2 + exit 2 +fi + +cmd="$1" +shift + +SRCROOT="$(cd "$(dirname "$0")/.." && pwd)" +B="$SRCROOT/bin" + +# Enable verbose mode if 'v' is in the command +[[ "$cmd" =~ v ]] && export VERBOSE=1 + +# If a direct pkg command exists, run it +if [ -x "$B/pkg-$cmd" ]; then + case "$cmd" in + build|test|edit) + exec "$B/pkg-$cmd" ${@:-$("$B/pkg-status")} + ;; + *) + exec "$B/pkg-$cmd" "$@" + esac +fi + +# Handle 'convert' (c) command +if [[ "$cmd" =~ c ]]; then + "$B/pkg-convert" + [[ "$cmd" == "c" ]] && exit +fi + +# Handle 'edit' (e) command +if [[ "$cmd" =~ e ]]; then + "$B/pkg-edit" ${@:-$("$B"/pkg-status)} + [[ "$cmd" == "e" ]] && exit +fi + +# Handle 'docker' (d) commands +if [[ "$cmd" =~ d ]]; then + if [[ "$cmd" =~ a ]]; then + for pkg in $("$B/pkg-ls"); do + pkgx gum format "# $pkg" + if [[ "$cmd" =~ b ]] && [ ! -d "$SRCROOT/artifacts/Linux/x86_64/$pkg" ]; then + "$B/pkg-docker" build $pkg + fi + [[ "$cmd" =~ t ]] && "$B/pkg-docker" test $pkg + done + else + [[ "$cmd" =~ b ]] && "$B/pkg-docker" build ${@:-$("$B/pkg-status")} + [[ "$cmd" =~ t ]] && "$B/pkg-docker" test ${@:-$("$B/pkg-status")} + fi + exit 0 +fi + +if [[ -d "$HOME/.pkgx/cli.github.com" && $(gh auth status >/dev/null 2>&1) ]]; then + export GH_TOKEN="$(gh auth token)" +fi + +if [[ "$cmd" =~ a ]]; then + for pkg in $("$B/pkg-ls"); do + pkgx gum format "# $pkg" + if [[ "$cmd" =~ b ]] && [ ! -d "$SRCROOT/artifacts/$(uname)/$(uname -m)/$pkg" ] || [[ "$cmd" =~ A ]]; then + "$B/pkg-build" $pkg + fi + [[ "$cmd" =~ t ]] && "$B/pkg-test" $pkg + done +else + [[ "$cmd" =~ b ]] && "$B/pkg-build" ${@:-$("$B/pkg-status")} + [[ "$cmd" =~ t ]] && "$B/pkg-test" ${@:-$("$B/pkg-status")} +fi + +exit 0 +#TODO error if invalid command! 
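`bin/pkg-build` (next file) prefers a per-project `versions.ts` over the defaults derived from `package.yml`. None ships in this diff, so the following is only a sketch of the apparent shape, mirroring the repository branch of `brewkit/default-versions.ts`; the GitHub slug is an assumption.

```ts
// Hypothetical projects/example.org/versions.ts — illustrative only.
// Shape mirrors brewkit/default-versions.ts from this PR: take a constraint,
// list GitHub releases, keep the ones that parse as versions.
import { github, Range } from "brewkit";

export default async function versions(constraint: Range) {
  const releases = await github.releases("example/example", constraint); // slug assumed
  return releases.compact(github.std_version_covert);
}
```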
diff --git a/bin/pkg-build b/bin/pkg-build new file mode 100755 index 00000000..5a4fc2e1 --- /dev/null +++ b/bin/pkg-build @@ -0,0 +1,240 @@ +#!/bin/bash + +set -eo pipefail + +_main() { + [ "$GITHUB_ACTIONS" ] && echo "::group::prep" + + _prep_env "$@" + _prep_stage + _prep_deps + _prep_script + + [ "$GITHUB_ACTIONS" ] && echo "::endgroup::" + + _run + + _post +} + +_prep_env() { + SRCROOT="$(cd "$(dirname "$0")"/.. && pwd)" + PKGSPEC="$1" + PLATFORM="$(uname)/$(uname -m)" + + if [[ $PKGSPEC == *\* ]]; then + # pkgx^2 bug doesn’t parse the `*` + PROJECT="${PKGSPEC%\*}" + elif [[ $PKGSPEC =~ ^([^@^~=]+)([@^~=].*)$ ]]; then + PROJECT="${BASH_REMATCH[1]}" + CONSTRAINT="${BASH_REMATCH[2]}" + elif [ -f "$SRCROOT/projects/$PKGSPEC/package.yml" ]; then + PROJECT="$PKGSPEC" + fi + + if [ -z "$PKGSPEC" ]; then + echo "pkgspec invalid" >&2 + exit 2 + fi + + if [ ! -f "$SRCROOT/projects/$PROJECT/package.yml" ]; then + echo "package.yml not found for \`$PKGSPEC\`" >&2 + exit 2 + fi + + if [ ! -f "$SRCROOT/artifacts/pantry/projects/$PROJECT/package.yml" ]; then + echo "run \`pkg convert\` first" >&2 + exit 2 + fi + + export PKGX_PANTRY_DIR="$SRCROOT/artifacts/$(uname)/pantry" + export PKGX_DIST_URL="https://dist.pkgx.dev/v2" + + if [ ! -e "$SRCROOT/artifacts/$(uname)/pantry/projects" ]; then + mkdir -p "$SRCROOT/artifacts/$(uname)/pantry" + ln -sf ../../pantry/projects "$SRCROOT/artifacts/$(uname)/pantry/projects" + fi + + deno="$(pkgx -q +deno^2 -- which deno)" +} + +_prep_stage() { + case $(uname) in + Linux) + TMPDIR="$(mktemp -d -t pkgx.XXXXXX)" + cd "$TMPDIR" + cp -r "$SRCROOT/brewkit/toolchain/linux" bin + cp /usr/local/bin/pkgx bin + # these must be symlinks or they don’t behave like eg. gcc + ln -s "$(pkgx -q +llvm.org -- which clang)" bin/cc + ln -s "$(pkgx -q +llvm.org -- which clang)" bin/gcc + ln -s "$(pkgx -q +llvm.org -- which clang++)" bin/c++ + ln -s "$(pkgx -q +llvm.org -- which clang++)" bin/g++ + ln -s "$(pkgx -q +llvm.org -- which clang-cpp)" bin/cpp + ln -s "$(pkgx -q +llvm.org -- which ld.lld)" bin/ld + ln -s "$(pkgx -q +llvm.org -- which llvm-ar)" bin/ar + ln -s "$(pkgx -q +llvm.org -- which llvm-as)" bin/as + ln -s "$(pkgx -q +llvm.org -- which llvm-nm)" bin/nm + ln -s "$(pkgx -q +llvm.org -- which llvm-strings)" bin/strings + jq="pkgx -q jq" + deno_exec="$deno" + ;; + + Darwin) + TMPDIR="$(mktemp -d /tmp/pkgx.XXXXXX)" + cd "$TMPDIR" + cp -r "$SRCROOT/brewkit/toolchain/darwin" bin + cp /usr/local/bin/pkgx bin + # prevent build scripts from dipping into Homebrew or /usr/local + cat < sandbox.sb +(version 1) +(allow default) +(deny file-write* + (subpath "/opt/homebrew") + (subpath "$SRCROOT") + (subpath "/usr/local")) +(allow file-write* + (subpath "$SRCROOT/srcs")) +(deny file-read* + (subpath "/opt/homebrew") + (subpath "/usr/local")) +(deny process-exec* + (subpath "/opt/homebrew") + (subpath "/usr/local")) +EoSB + if [ -f "$SRCROOT/projects/$PROJECT/sandbox.sb" ]; then + cat "$SRCROOT/projects/$PROJECT/sandbox.sb" >> sandbox.sb + fi + if [ -f "$SRCROOT/projects/$PROJECT/sandbox.sb.in" ]; then + sed "s|{{prefix}}|$PREFIX|g" $SRCROOT/projects/$PROJECT/sandbox.sb.in >> sandbox.sb + fi + + deno_exec="sandbox-exec -f ./sandbox.sb $deno" + jq="$(which jq)" + export MACOSX_DEPLOYMENT_TARGET=11.0 + ;; + + MINGW64_NT*) + TMPDIR="$(mktemp -d)" + cd "$TMPDIR" + mkdir bin + cp "$(which pkgx)" bin + deno_exec="$deno" + jq="pkgx -q jq" + ;; + + *) + echo "unsupported platform" &>2 + exit 1 + ;; + esac + + export PKGX_BIN="$PWD/bin" +} + +_prep_deps() { + # copy anything built to the 
build’s pkgx dir since
+  # some of it may not be at dist.pkgx.dev yet
+  # FIXME symlinks would be faster but we don’t want builds to have write access there
+  # FIXME ideally deps-env script would copy what we need to speed things up
+  if [ -d "$SRCROOT/artifacts/$PLATFORM" ]; then
+    cp -aR "$SRCROOT/artifacts/$PLATFORM" deps
+  fi
+
+  # sanitize PATH before adding deps
+  export PATH="$PWD/bin:/usr/bin:/bin:/usr/sbin:/sbin"
+  # use this as PKGX_DIR from now on
+  # TODO ideally would do this for the whole script but that would mean installing llvm/deno every time (though that can be improved)
+  export PKGX_DIR="$PWD/deps"
+
+  pkgenv="$($deno run --allow-read="$SRCROOT" --allow-run "$SRCROOT/brewkit/scripts/deps-env.ts" $PKGSPEC)"
+  eval "$pkgenv"
+}
+
+_prep_script() {
+  if [ -f "$SRCROOT/projects/$PROJECT/versions.ts" ]; then
+    echo "import versions from '$SRCROOT/projects/$PROJECT/versions.ts';" > run-build.ts
+  else
+    echo "import { default_versions } from 'brewkit';" > run-build.ts
+    echo "const versions = default_versions('$SRCROOT/projects/$PROJECT/package.yml');" >> run-build.ts
+  fi
+
+  if [ -f "$SRCROOT/projects/$PROJECT/fixup.ts" ]; then
+    echo "import process_fixup from '$SRCROOT/projects/$PROJECT/fixup.ts';" >> run-build.ts
+  else
+    echo "const process_fixup = () => true;" >> run-build.ts
+  fi
+
+  echo "import build from '$SRCROOT/projects/$PROJECT/build.ts';" >> run-build.ts
+  cat "$SRCROOT/brewkit/build-template.ts" >> run-build.ts
+  echo "{\"imports\": {\"brewkit\": \"$SRCROOT/brewkit/mod.ts\"}}" > deno.json
+  # ensures deno uses the deps we already cached
+  cp "$SRCROOT/deno.lock" .
+}
+
+_run() {
+  # we need full allow-read and write because Deno.symlink sucks
+  # TODO make our own implementation
+
+  # we need full --allow-env to allow build scripts to set environment
+  # TODO they can do this some other way!
+ + $deno_exec run \ + --quiet \ + --allow-read \ + --allow-run \ + --allow-env \ + --allow-write \ + --allow-net \ + ./run-build.ts \ + "$PKGSPEC" \ + "$PWD/src" \ + "$SRCROOT/projects/$PROJECT" \ + "$PWD/deps" \ + "$PWD/prefix" +} + +_post() { + PROJECT="$(cat build-receipt.json | $jq -r .project)" + VERSION="$(cat build-receipt.json | $jq -r .version)" + + if [ -f "$GITHUB_ENV" ]; then + echo "VERSION=$VERSION" >> $GITHUB_ENV + echo "PROJECT=$PROJECT" >> $GITHUB_ENV + echo "PREFIX=$SRCROOT/artifacts/$PLATFORM/$PROJECT/v$VERSION" >> $GITHUB_ENV + echo "PLATFORM=$PLATFORM" >> $GITHUB_ENV + + case "$PLATFORM" in + Linux/x86_64) echo "DIST_PLATFORM=linux/x86-64" >> $GITHUB_ENV;; + Darwin/x86_64) echo "DIST_PLATFORM=darwin/x86-64" >> $GITHUB_ENV;; + Linux/arm64|Linux/aarch64) echo "DIST_PLATFORM=linux/aarch64" >> $GITHUB_ENV;; + Darwin/arm64) echo "DIST_PLATFORM=darwin/aarch64" >> $GITHUB_ENV;; + esac + fi + + rm -rf "$SRCROOT/artifacts/$PLATFORM/$PROJECT/v$VERSION" + rm -f "$SRCROOT/artifacts/$PLATFORM/$PROJECT/v$(echo "$VERSION" | sed -E 's/\.[0-9]+$//')" + rm -f "$SRCROOT/artifacts/$PLATFORM/$PROJECT/v$(echo "$VERSION" | sed -E 's/\.[0-9]+\.[0-9]+$//')" + mkdir -p "$SRCROOT/artifacts/$PLATFORM/$PROJECT" + + if [ $(uname) = Linux -a -f /.dockerenv ]; then + # do the below via tar instead of mv because Docker’s overlayfs + # doesn’t (100%) support moving files between directories + tar cf - ./prefix | tar -C "$SRCROOT/artifacts/$PLATFORM/$PROJECT" -xf - + mv "$SRCROOT/artifacts/$PLATFORM/$PROJECT/prefix" "$SRCROOT/artifacts/$PLATFORM/$PROJECT/v$VERSION" + else + mv prefix "$SRCROOT/artifacts/$PLATFORM/$PROJECT/v$VERSION" + fi + + mv build-receipt.json "$SRCROOT/artifacts/$PLATFORM/$PROJECT/v$VERSION" + + cd "$SRCROOT/artifacts" + rm -f _latest + ln -s $PLATFORM/$PROJECT/v$VERSION _latest + ln -s v$VERSION $PLATFORM/$PROJECT/v$(echo "$VERSION" | sed -E 's/\.[0-9]+$//') + ln -s v$VERSION $PLATFORM/$PROJECT/v$(echo "$VERSION" | sed -E 's/\.[0-9]+\.[0-9]+$//') + + rm -rf "$TMPDIR" +} + +_main "$@" diff --git a/bin/pkg-build.cmd b/bin/pkg-build.cmd new file mode 100644 index 00000000..fc20695d --- /dev/null +++ b/bin/pkg-build.cmd @@ -0,0 +1,12 @@ +@echo off +setlocal + +:: Get the directory of the .cmd file +set SCRIPT_DIR=%~dp0 + +:: Run pkg-build.ts with Deno, passing all arguments +deno run --allow-all "%SCRIPT_DIR%pkg-build.ts" %* +if %ERRORLEVEL% NEQ 0 exit /b %ERRORLEVEL% + +endlocal +exit /b 0 diff --git a/bin/pkg-build.ts b/bin/pkg-build.ts new file mode 100755 index 00000000..61bb63f2 --- /dev/null +++ b/bin/pkg-build.ts @@ -0,0 +1,159 @@ +#!/usr/bin/env -S pkgx deno^2 run -A + +import { parse, Path, SemVer } from "brewkit"; +import { fromFileUrl } from "jsr:@std/path@1/from-file-url"; +import parse_deps from "../brewkit/scripts/deps-env.ts"; + +const pkg = parse(Deno.args[0]); +const srcroot = new Path(fromFileUrl(import.meta.url)).parent().parent(); +const tmpdir = mktempd(); +const deno = "deno"; + +// copying pantry into tmpdir as weird issues with parallels shared folders +// that make pkgx fail to write db with “access denied” messages +srcroot.join("artifacts/pantry").cp({ into: tmpdir }); + +Deno.env.set('PKGX_PANTRY_DIR', tmpdir.join("pantry").string); +Deno.env.set('PKGX_DIST_URL', "https://dist.pkgx.dev/v2"); + +await prep_deps(); +await prep_script(); +await run(); +await post(); + +//Deno.removeSync(tmpdir, { recursive: true }); + +async function prep_script() { + let src = ""; + const root = srcroot.join("projects", pkg.project); + + let foo: Path | undefined; + if (foo = 
root.join("versions.ts").isFile()) { + src += `import versions from "${foo.fileURL()}";\n`; + } else { + src += "import { default_versions } from 'brewkit';\n"; + src += `const versions = default_versions("${root.join("package.yml").string.replaceAll("\\", "\\\\")}");\n`; + } + if (foo = root.join("fixup.ts").isFile()) { + src += `import process_fixup from "${foo.fileURL()}";\n`; + } else { + src += "const process_fixup = () => true;\n"; + } + + src += `import build from "${root.join("build.ts").fileURL()}";\n`; + src += await srcroot.join("brewkit/build-template.ts").read(); + + tmpdir.join("run-build.ts").write(src); + + tmpdir.join("deno.json").write( + `{"imports": {"brewkit": "${srcroot.join("brewkit/mod.ts").fileURL()}"}}` + ); + + srcroot.join("deno.lock").cp({ into: tmpdir }); +} + +async function prep_deps() { + let foo: Path | undefined; + if (foo = srcroot.join("artifacts", platform_partial_path()).isDirectory()) { + foo.cp({ to: tmpdir.join("deps") }); + } + + Deno.env.set("PKGX_DIR", tmpdir.join("deps").string); + + const json = await parse_deps(pkg.project); + + if (json) { + const SEP = Deno.build.os == "windows" ? ";" : ":"; + + for (const [key, paths] of Object.entries(json.env) as [string, string[]][]) { + let value = paths.join(SEP); + if (Deno.env.get(key)) { + value += `${SEP}\${${key}}`; + } + Deno.env.set(key, value); + } + + for (const [_pkg, env_dict] of Object.entries(json.runtime_env)) { + for (let [key, value] of Object.entries(env_dict as any)) { + const { path } = json.pkgs.find(({ project }: any) => project === _pkg); + value = (value as string).replace("{{prefix}}", path); + Deno.env.set(key, value as string); + } + } + + const pkgs = Object.values(json.pkgs).map(({ path }: any) => path); + Deno.env.set("PKGS", pkgs.join(SEP)); + } +} + +async function run() { + //NOTE --allow-all is required for Windows, dunno why but whatever + const args = [ + "run", + "--quiet", + "--allow-all", + tmpdir.join("run-build.ts").string, + `${pkg.project}${pkg.constraint.toString() == '*' ? 
'' : pkg.constraint}`, + tmpdir.join("src").string, + srcroot.join("projects", pkg.project).string, + tmpdir.join("deps").string, + tmpdir.join("prefix").string, + ]; + const proc = new Deno.Command(deno, { args }).spawn(); + const { code, success } = await proc.status; + if (!success) { + Deno.exit(code); + } +} + +async function post() { + const receipt = tmpdir.join("build-receipt.json") + const version = new SemVer(JSON.parse(await receipt.read()).version); + const root = srcroot.join("artifacts", platform_partial_path(), pkg.project).mkdir('p'); + const dstdir = root.join(`v${version}`); + + dstdir.rm('rf'); + + if (Deno.build.os == "windows" && tmpdir.drive() != dstdir.drive()) { + // cannnot move across drives and it we’re using parallels then they + // _will_be_ separate drives + tmpdir.join("prefix").cp({ to: dstdir }); + receipt.cp({ into: dstdir }); + } else { + tmpdir.join("prefix").mv({ to: dstdir }); + receipt.mv({ into: dstdir }); + } +} + +function platform() { + const platform = (() => { + switch (Deno.build.os) { + case "darwin": return "Darwin"; + case "linux": return "Linux"; + case "windows": return "Windows"; + default: + return Deno.build.os; + } + })() + const arch = (() => { + switch (Deno.build.arch) { + case "x86_64": return "x86-64"; + case "aarch64": return "aarch64"; + } + })() + return [platform, arch]; +} + +import { SEPARATOR as SEP } from "jsr:@std/path@^1"; + +function platform_partial_path() { + return platform().join(SEP); +} + +function mktempd() { + // if (Deno.build.os == "windows") { + // return new Path(Deno.makeTempDirSync({ dir: srcroot.join("tmp").mkdir().string })); + // } else { + return new Path(Deno.makeTempDirSync({ prefix: "pkgx." })); + // } +} diff --git a/bin/pkg-convert b/bin/pkg-convert new file mode 100755 index 00000000..b02841e0 --- /dev/null +++ b/bin/pkg-convert @@ -0,0 +1,24 @@ +#!/usr/bin/env -S pkgx -q make -f + +.PHONY: all debug +MAKEFILE_DIR := $(dir $(abspath $(lastword $(MAKEFILE_LIST)))) +SRCROOT := $(abspath $(MAKEFILE_DIR)/..) +PKG_FILES := $(shell find $(SRCROOT)/projects -name package.yml) +OUTPUTS := $(PKG_FILES:$(SRCROOT)/projects/%=$(SRCROOT)/artifacts/pantry/projects/%) + +ifeq ($(OS),Windows_NT) + DENO := deno run -RW +endif + +# Default target +all: $(OUTPUTS) + +# Rule to process each package.yml file +$(SRCROOT)/artifacts/pantry/projects/%: $(SRCROOT)/projects/% + @mkdir -p $(dir $@) + $(DENO) $(SRCROOT)/brewkit/scripts/convert-package.yml.ts "$<" > "$@" + +debug: + @echo "PKG_FILES: $(PKG_FILES)" + @echo "OUTPUTS: $(OUTPUTS)" + @echo "SRCROOT: $(SRCROOT)" diff --git a/bin/pkg-docker b/bin/pkg-docker new file mode 100755 index 00000000..f60d46c5 --- /dev/null +++ b/bin/pkg-docker @@ -0,0 +1,34 @@ +#!/bin/sh + +SRCROOT="$(cd "$(dirname "$0")"/.. 
&& pwd)" +img=ghcr.io/pkgxdev/bldbot +cmd=$1 +shift + +eval "$(pkgx mash pkgx/ensure +docker)" + +case $cmd in +build) + exec docker run --rm -v $SRCROOT:/work -w /work -e GITHUB_TOKEN $img bin/pkg-build "$@" + ;; +test) + exec docker run --rm -v $SRCROOT:/work -w /work -e GITHUB_TOKEN $img bin/pkg-test "$@" + ;; +run) + exec docker run \ + --rm \ + --volume $SRCROOT:/work \ + -w /work \ + -e GITHUB_TOKEN \ + -e PATH="/work/bin:/usr/local/bin:/usr/bin:/usr/sbin:/bin:/sbin" \ + -it \ + $img + ;; +pull) + exec docker pull $img + ;; +*) + echo "usage: pkg docker [build|test|run|pull]" >&2 + exit 2 + ;; +esac diff --git a/bin/pkg-edit b/bin/pkg-edit new file mode 100755 index 00000000..9cd8d7ff --- /dev/null +++ b/bin/pkg-edit @@ -0,0 +1,50 @@ +#!/bin/bash + +if [ $# -eq 0 ]; then + echo "usage: pkg edit " >&2 + exit 2 +fi + +d="$(cd "$(dirname "$0")"/.. && pwd)" + +PKGS=() +for PKG in "$@"; do + PKG="$d/projects/$PKG" + + if [ ! -f "$PKG/package.yml" ]; then + echo "error: no such package: $1" >&2 + exit 1 + fi + + PKGS+=("$PKG") +done + +if [ -z "$EDITOR" ]; then + if command -v code >/dev/null; then + EDITOR="code" + elif command -v vim >/dev/null; then + EDITOR="vim" + elif command -v vi >/dev/null; then + EDITOR="vi" + elif command -v emacs >/dev/null; then + EDITOR="emacs" + else + echo "error: \`\$EDITOR\` is not set" >&2 + exit 1 + fi +elif [ "$EDITOR" = code_wait ]; then + # this is for mxcl who generally prefers his editor to wait + # but not in this case. #perks-of-making-the-thing + EDITOR=code +fi + +if [ "$EDITOR" = code ]; then + ARGS=() + for PKG in "${PKGS[@]}"; do + ARGS+=("$PKG" "$PKG/build.ts" "$PKG/package.yml") + done + + exec $EDITOR "$d" "${ARGS[@]}" +else + exec $EDITOR "${PKGS[@]}" +fi diff --git a/bin/pkg-fixup b/bin/pkg-fixup new file mode 100755 index 00000000..3022e4cc --- /dev/null +++ b/bin/pkg-fixup @@ -0,0 +1,14 @@ +#!/bin/bash + +set -eo pipefail + +TMPDIR=$(mktemp -d) +cd "$TMPDIR" + +echo ' +await fixup( + prefix, + { prefixes: deps_prefixes, PKGX_DIR }, + PKGX_DIR, +);' >> run-fixup.ts + diff --git a/bin/pkg-ls b/bin/pkg-ls new file mode 100755 index 00000000..ddb89104 --- /dev/null +++ b/bin/pkg-ls @@ -0,0 +1,10 @@ +#!/bin/sh + +cd "$(dirname "$0")/.." + +find projects -type f -name package.yml | while read -r file; do + last_commit=$(git log -1 --format="%at %H" -- "$file") + file=${file#projects/} + file=${file%/package.yml} + echo "$last_commit $file" +done | sort -n | awk '{for (i=3; i<=NF; ++i) printf "%s%s", $i, (i versions.txt +aws s3 cp versions.txt s3://dist.tea.xyz/v2/$PROJECT/$DIST_PLATFORM/versions.txt + +chmod -R +w . +cd / +rm -rf "$TMPDIR" + +aws cloudfront create-invalidation \ + --distribution-id EWF6XUK8J11DP \ + --paths "/v2/$PROJECT/$DIST_PLATFORM/$VERSION.tar.gz" "/v2/$PROJECT/$DIST_PLATFORM/$VERSION.tar.xz" "/v2/$PROJECT/$DIST_PLATFORM/versions.txt" diff --git a/bin/pkg-status b/bin/pkg-status new file mode 100755 index 00000000..c2f29eb6 --- /dev/null +++ b/bin/pkg-status @@ -0,0 +1,30 @@ +#!/bin/sh + +SRCROOT="$(cd "$(dirname "$0")"/.. 
&& pwd)" + +eval "$(pkgx mash pkgx/ensure +git +column +sort +uniq)" + +# sadly we seemingly need to reference origin/main +DIVERGENCE_SHA="$(git merge-base HEAD origin/main)" +CHANGED_FILES="$(git diff --name-only "$DIVERGENCE_SHA") $(git status --untracked-files --porcelain)" + +OUTPUT="" + +for CHANGED_FILE in $CHANGED_FILES; do + PROJECT=$(echo "$CHANGED_FILE" | sed -n 's#projects/\(.*\)/package\.yml$#\1#p') + if test -z "$PROJECT" + then + true # noop + elif test "$1" = "--print-paths"; then + OUTPUT="$OUTPUT $CHANGED_FILE" + else + OUTPUT="$OUTPUT $PROJECT" + fi +done + +if [ -z "$OUTPUT" ]; then + echo "no new or modified manifests found" >&2 + exit 1 +else + echo $OUTPUT | tr ' ' '\n' | sort | uniq | column +fi diff --git a/bin/pkg-test b/bin/pkg-test new file mode 100755 index 00000000..ed3b429a --- /dev/null +++ b/bin/pkg-test @@ -0,0 +1,166 @@ +#!/bin/bash + +set -eo pipefail + +SRCROOT="$(cd "$(dirname "$0")/.." && pwd)" +PKGSPEC="$1" +PLATFORM="$(uname)/$(uname -m)" +deno="$(pkgx +deno^2 -- which deno)" + +export PKGX_PANTRY_DIR="$SRCROOT/artifacts/$(uname)/pantry" +export PKGX_DIST_URL="https://dist.pkgx.dev/v2" + +if [ "$PKGSPEC" ]; then + if [[ $PKGSPEC == *\* ]]; then + PROJECT="${PKGSPEC%\*}" + CONSTRAINT='*' + elif [[ $PKGSPEC =~ ^([^@^~=]+)([@^~=].*)$ ]]; then + PROJECT="${BASH_REMATCH[1]}" + CONSTRAINT="${BASH_REMATCH[2]}" + elif [ -f "$SRCROOT/projects/$PKGSPEC/package.yml" ]; then + PROJECT="$PKGSPEC" + CONSTRAINT='*' + else + echo "pkgspec invalid" >&2 + exit 1 + fi + for x in "$SRCROOT/artifacts/$PLATFORM/$PROJECT/"v*; do + PREFIX="$x" + done +elif [ ! "$PROJECT" -o ! "$PREFIX" ]; then + echo "Usage: $0 " + exit 1 +fi + +case $(uname) in +Linux) + TMPDIR="$(mktemp -d -t pkgx.XXXXXX)" + cd "$TMPDIR" + cp -r $SRCROOT/brewkit/toolchain/linux bin + cp /usr/local/bin/pkgx bin + # these must be symlinks or they don’t behave like eg. 
gcc + ln -s "$(pkgx -q +llvm.org -- which clang)" bin/cc + ln -s "$(pkgx -q +llvm.org -- which clang)" bin/gcc + ln -s "$(pkgx -q +llvm.org -- which clang++)" bin/c++ + ln -s "$(pkgx -q +llvm.org -- which clang++)" bin/g++ + ln -s "$(pkgx -q +llvm.org -- which clang-cpp)" bin/cpp + ln -s "$(pkgx -q +llvm.org -- which ld.lld)" bin/ld + ln -s "$(pkgx -q +llvm.org -- which llvm-ar)" bin/ar + ln -s "$(pkgx -q +llvm.org -- which llvm-as)" bin/as + ln -s "$(pkgx -q +llvm.org -- which llvm-nm)" bin/nm + ln -s "$(pkgx -q +llvm.org -- which llvm-strings)" bin/strings + deno_exec="$deno" + ;; +Darwin) + TMPDIR="$(mktemp -d /tmp/pkgx.XXXXXX)" + cd "$(realpath "$TMPDIR")" # resolves the symlink and stops deno complaining about writes to a slightly different directory + cp -r "$SRCROOT/brewkit/toolchain/darwin" bin + cp /usr/local/bin/pkgx bin + + cat < sandbox.sb +(version 1) +(allow default) +(deny file-write* + (subpath "/opt/homebrew") + (subpath "$SRCROOT") + (subpath "/usr/local")) +(deny file-read* + (subpath "/opt/homebrew") + (subpath "/usr/local")) +(deny process-exec* + (subpath "/opt/homebrew") + (subpath "/usr/local")) +EoSB + if [ -f $SRCROOT/projects/$PROJECT/sandbox.sb ]; then + cat $SRCROOT/projects/$PROJECT/sandbox.sb >> sandbox.sb + fi + if [ -f $SRCROOT/projects/$PROJECT/sandbox.sb.in ]; then + sed "s|{{prefix}}|$PREFIX|g" $SRCROOT/projects/$PROJECT/sandbox.sb.in >> sandbox.sb + fi + + deno_exec="sandbox-exec -f "$PWD/sandbox.sb" $deno" +esac + +cat << EoTS > run-test.ts +import test_runner from '$SRCROOT/projects/$PROJECT/test.ts'; +import { Path, SemVer } from 'brewkit'; +const prefix = new Path(Deno.args[0]); +const version = new SemVer(prefix.basename()); +await test_runner({prefix, version}); +EoTS + +echo "{\"imports\": {\"brewkit\": \"$SRCROOT/brewkit/mod.ts\"}}" > deno.json +cp "$SRCROOT/deno.lock" . + +export PATH="/usr/bin:/bin:/usr/sbin:/sbin" + +if [ $(uname) != "Darwin" ]; then + export PATH="$PWD/bin:$PATH" +fi + +if [ -d "$SRCROOT/artifacts/$PLATFORM" ]; then + cp -aR "$SRCROOT/artifacts/$PLATFORM" deps +fi + +mkdir -p "$SRCROOT/artifacts/$(uname)/pantry" +cd "$SRCROOT/artifacts/$(uname)/pantry" +ln -sf ../../pantry/projects +cd - + +env_out="$(PKGX_DIR="$SRCROOT/artifacts/$PLATFORM" /usr/local/bin/pkgx +$PROJECT)" +set -a +eval "$env_out" +set +a + +if [ $(uname) = "Darwin" ]; then + # we need our cc/ld to be first because they add our RPATH linkage flags + # and without those nothing compiles/links. This isn’t required on Linux + # since Linux doesn’t have a ”id” system. + export PATH="$PWD/bin:$PATH" +fi + +if [ "$GITHUB_ACTIONS" ]; then + echo "::group::env" + env + echo "::endgroup::" +fi + +mkdir stage +cd stage + +for x in "$SRCROOT"/projects/$PROJECT/*; do + if [[ "$x" = */package.yml ]]; then + true + elif [[ "$x" == *.ts ]]; then + true + elif [ -f "$x" ]; then + cp "$x" . 
+ fi +done + +allow_env=() +allow_rw=() +for key in PWD TMP TEMP TMPDIR TEMPDIR; do + if [ -n "${!key}" ]; then + allow_env+=("$key") + allow_rw+=("${!key}") + fi +done +allow_env+=("HOME") +allow_rw=$(IFS=,; echo "${allow_rw[*]}") +allow_env=$(IFS=,; echo "${allow_env[*]}") + + +$deno_exec \ + run \ + --quiet \ + --allow-read="$allow_rw" \ + --allow-run \ + --allow-env="${allow_env}" \ + --allow-write="${allow_rw}" \ + --allow-net \ + --ext=ts \ + ../run-test.ts \ + "$PREFIX" + +rm -rf "$TMPDIR" diff --git a/bin/pkg-test.cmd b/bin/pkg-test.cmd new file mode 100644 index 00000000..1ef21ab0 --- /dev/null +++ b/bin/pkg-test.cmd @@ -0,0 +1,12 @@ +@echo off +setlocal + +:: Get the directory of the .cmd file +set SCRIPT_DIR=%~dp0 + +:: Run pkg-build.ts with Deno, passing all arguments +deno run --allow-all "%SCRIPT_DIR%pkg-test.ts" %* +if %ERRORLEVEL% NEQ 0 exit /b %ERRORLEVEL% + +endlocal +exit /b 0 \ No newline at end of file diff --git a/brewkit/build-template.ts b/brewkit/build-template.ts new file mode 100644 index 00000000..79c9de53 --- /dev/null +++ b/brewkit/build-template.ts @@ -0,0 +1,94 @@ +import { fixup, parse, Path, Prefix, SemVer, semver } from "brewkit"; + +const pkg = parse(Deno.args[0]); +const build_dir = new Path(Deno.args[1]); +const props = new Path(Deno.args[2]); +const PKGX_DIR = new Path(Deno.args[3]); +const prefix = new Path(Deno.args[4]); + +Deno.chdir(build_dir.mkdir().string); + +console.error( + "%cpkgspec:", + "color: red", + `${pkg.project}${pkg.constraint}`, +); +console.error("%cstage:", "color: red", build_dir.parent()); + +const got_versions: { version: SemVer }[] = await versions(pkg.constraint); +const version: { version: SemVer } = got_versions + .filter(({ version }) => pkg.constraint.satisfies(version)) + .sort((a, b) => semver.compare(a.version, b.version)) + .pop()!; + +console.error("%cversion:", "color: red", version.version); + +if (Deno.env.get("GITHUB_ACTIONS")) { + console.log("::group::build"); +} + +const SEP = Deno.build.os == "windows" ? 
";" : ":"; +const deps_prefixes = (Deno.env.get("PKGS") || "").split(SEP).compact(Path.abs); +const deps = deps_prefixes.reduce((acc, prefix) => { + const version = new SemVer(prefix.basename()); + const project = prefix.parent().relative({ to: PKGX_DIR }); + acc[project] = { version, prefix }; + return acc; +}, {} as Record); + +const opts = { + prefix: new Prefix(prefix), + ...(() => { + const v: SemVer & { marketing: string } = version.version as any; + v.marketing = `${v.major}.${v.minor}`; + return { + ...version, + version: v, + }; + })(), + deps, + props, + PKGX_DIR, +}; + +await build(opts); + +let platform_triple = Deno.build.target; +if (Deno.build.os == "linux") { + platform_triple += "-gnu_2.28"; +} + +Deno.writeTextFileSync( + build_dir.parent().join("build-receipt.json").string, + JSON.stringify( + { + project: pkg.project, + version: `${version.version}`, + target: platform_triple, + prefix: prefix.string, + deps: deps_prefixes, + PKGX_DIR: PKGX_DIR.string, + }, + null, + 2, + ), +); + +if (Deno.env.get("GITHUB_ACTIONS")) { + console.log("::endgroup::"); +} + +if (Deno.build.os != 'windows') { + // rm -rf required since it is possible for a pkg to depend on itself + const final_prefix = PKGX_DIR.join(pkg.project).mkdir("p").join(`v${version.version}`).rm("rf").ln("s", { + target: prefix, + }); + + await fixup( + prefix, + final_prefix, + deps_prefixes, + PKGX_DIR, + process_fixup, + ); +} diff --git a/brewkit/default-versions.ts b/brewkit/default-versions.ts new file mode 100644 index 00000000..fb1e384a --- /dev/null +++ b/brewkit/default-versions.ts @@ -0,0 +1,29 @@ +import { isNumber, isString } from "https://deno.land/x/is_what@v4.1.15/src/index.ts"; +import { github, Range, SemVer } from "brewkit"; +import { parse } from "jsr:@std/yaml@^1"; + +export default function (yamlfile: string) { + const data = Deno.readTextFileSync(yamlfile); + const yaml = parse(data) as any; + if (Array.isArray(yaml.versions)) { + return () => + yaml.versions.map((raw: string | number) => ({ + version: new SemVer(`${raw}`), + tag: `${raw}`, + })); + } + if (isString(yaml.version) || isNumber(yaml.version)) { + const tag = `${yaml.version}`; + return () => [{ + version: new SemVer(tag), + tag, + }]; + } + if (yaml.repository) { + const slug = new URL(yaml.repository).pathname.slice(1); + return async (constraint: Range) => { + return (await github.releases(slug, constraint)).compact(github.std_version_covert); + }; + } + throw new Error("you must write a `versions.ts` or provide a repository node in the `package.yml`"); +} diff --git a/brewkit/env-include.ts b/brewkit/env-include.ts new file mode 100644 index 00000000..4fdac33d --- /dev/null +++ b/brewkit/env-include.ts @@ -0,0 +1,31 @@ +import { backticks_quiet, parse, Path, undent } from "brewkit"; +const SEP = Deno.build.os == "windows" ? ";" : ":"; + +export default function env_include(pkgspecs: string) { + const args = pkgspecs.split(/\s+/).map((x) => `+${x}`).join(" "); + const out = backticks_quiet`pkgx ${args} --json=v1`; + const json = JSON.parse(out); + + for ( + const [key, values] of Object.entries(json.env as Record) + ) { + const existing_value = Deno.env.get(key); + const new_value = existing_value?.trim() ? 
`${values.join(SEP)}${SEP}${existing_value}` : values.join(SEP); + Deno.env.set(key, new_value); + } +} + +export function stub(pkgspecs: string) { + const args = pkgspecs.split(/\s+/).map(parse); + const path = new Path(Deno.env.get("PKGX_BIN")!); + for (const arg of args) { + let constraint = `${arg.constraint}`; + if (constraint == "*") { + constraint = ""; + } + path.join(arg.project).write(undent` + #!/bin/sh + exec pkgx ${arg.project}${constraint} "$@" + `).chmod(0o755); + } +} diff --git a/brewkit/fixup.ts b/brewkit/fixup.ts new file mode 100644 index 00000000..0cb91281 --- /dev/null +++ b/brewkit/fixup.ts @@ -0,0 +1,129 @@ +import LibrarySymlinkEnsurer from "./fixups/library-symlink-ensurer.ts"; +import { fix_pkg_config_file } from "./fixups/fix-pkg-config-file.ts"; +import { EmptyDirectoryPruner } from "./fixups/empty-dir-pruner.ts"; +import { fix_cmake_file } from "./fixups/fix-cmake-file.ts"; +import RehardLinker from "./fixups/rehard-linker.ts"; +import fix_shebang from "./fixups/fix-shebang.ts"; +import RpathFixer from "./fixups/rpath-fixer.ts"; +import Stripper from "./fixups/Stripper.ts"; +import { walk } from "jsr:@std/fs@1/walk"; +import { mime } from "./fixups/utils.ts"; +import { flatmap, Path } from "brewkit"; +import path from "node:path"; + +export default async function default_fixups( + install_prefix: Path, + final_prefix: Path, + dep_prefixes: Path[], + PKGX_DIR: Path, + process: (path: Path) => boolean = () => true, +) { + // fix pkgs putting man at the root rather than in `share` + flatmap( + install_prefix.join("man").isDirectory(), + (mandir) => mandir.mv({ into: install_prefix.join("share").mkdir() }), + ); + + let walker = walk(final_prefix.string, { + includeFiles: true, + includeDirs: true, + includeSymlinks: true, + }); + + const rpather = new RpathFixer(install_prefix, final_prefix, dep_prefixes, PKGX_DIR); + const pruner = new EmptyDirectoryPruner(); + const stripper = new Stripper(); + const rehardlinker = new RehardLinker(); + const provided_programs: string[] = []; + const shebangs: string[] = []; + const lib_symlink_fixer = new LibrarySymlinkEnsurer(); + const rms: Path[] = []; + + const maybe_add_provided_path = (path: Path) => { + switch (path.parent().basename()) { + case "bin": + case "sbin": + provided_programs.push(path.basename()); + } + }; + + for await ( + const { name, isDirectory, isFile, isSymlink, ...entry } of walker + ) { + const path = new Path(entry.path); + + if (!process(path)) { + continue; + } + + if (isSymlink) { + // fix absolute path symlinks + const target = await Deno.readLink(path.string); + if (target.startsWith("/")) { + const relative = new Path(target).relative({ to: path.parent() }); + path.rm(); + Deno.symlink(relative, path.string, { type: isFile ? 
"file" : "dir" }); + } + + maybe_add_provided_path(path); + } else if (isFile) { + if (!rehardlinker.push(path)) { + // if rehardlinker returns false, it means we already saw this inode + // so we can skip handling it again + maybe_add_provided_path(path); + continue; + } + + switch (await mime(path)) { + case "exe": + rpather.push(path); + stripper.push(path); //TODO no need to strip for same inode + maybe_add_provided_path(path); + lib_symlink_fixer.push(path); + break; + + case "shebang": + shebangs.push(path.string); + break; + + default: + switch (path.extname()) { + case ".pc": + await fix_pkg_config_file(path, install_prefix); + break; + case ".pyc": + case ".la": + case ".a": // static libs are not useful to our purposes + console.error(`::warning file=${path}::pruning file:`, path); + path.rm(); + //TODO need to update the empty dir pruner + continue; + case "cmake": + await fix_cmake_file(path, install_prefix); + break; + } + } + } else if (isDirectory && name.endsWith(".dSYM")) { + rms.push(path); + } else { + pruner.add(new Path(path), isDirectory); + } + } + + for (const shebang of shebangs) { + await fix_shebang(new Path(shebang), provided_programs); + } + + stripper.execute(); // strip first as rpather codesigns and strip breaks codesigning + lib_symlink_fixer.execute(); // needs to be before the rpath fixer due to fixing lib symlinks being relevant + await rpather.execute(); + await rehardlinker.execute(); + await pruner.execute(); + + // we have a “docs go on the Internet” policy + install_prefix.join("share/doc").rm("rf"); + + for (const byebye of rms) { + byebye.rm("rf"); + } +} diff --git a/brewkit/fixups/Stripper.ts b/brewkit/fixups/Stripper.ts new file mode 100644 index 00000000..929d5096 --- /dev/null +++ b/brewkit/fixups/Stripper.ts @@ -0,0 +1,32 @@ +import Path from "../path.ts"; +import run from "../run.ts"; + +export default class Stripper { + paths: Path[] = []; + + push(path: Path) { + switch (path.extname()) { + case ".bundle": + case ".o": + // cannot strip these, you always lose needed symbols AFICT + return; + } + this.paths.push(path); + } + + execute() { + for (const path of this.paths) { + if (path.string.endsWith(".dylib") || /\.so(\.\d)*$/.test(path.string)) { + run`strip -x ${path}`; + } else if (path.basename().startsWith("perl")) { + // perl uses symbols in itself for its modules + //FIXME should be easy to override default fixups and exclude this + run`strip -x ${path}`; + } else if (Deno.build.os == "darwin") { + run`strip -u -r ${path}`; + } else { + run`strip -Ss ${path}`; + } + } + } +} diff --git a/brewkit/fixups/empty-dir-pruner.ts b/brewkit/fixups/empty-dir-pruner.ts new file mode 100644 index 00000000..84522e26 --- /dev/null +++ b/brewkit/fixups/empty-dir-pruner.ts @@ -0,0 +1,21 @@ +import Path from "../path.ts"; + +export class EmptyDirectoryPruner { + candidates: Set = new Set(); + + add(p: Path, isDirectory: boolean) { + if (isDirectory) { + this.candidates.add(p.string); + } + } + + async execute() { + for (const dir of this.candidates) { + if (new Path(dir).isDirectory()) { + try { + await Deno.remove(dir); + } catch {} + } + } + } +} diff --git a/brewkit/fixups/fix-cmake-file.ts b/brewkit/fixups/fix-cmake-file.ts new file mode 100644 index 00000000..da7d4fa1 --- /dev/null +++ b/brewkit/fixups/fix-cmake-file.ts @@ -0,0 +1,17 @@ +import Path from "../path.ts"; + +export async function fix_cmake_file(path: Path, install_prefix: Path) { + // Facebook and others who use CMake sometimes rely on a libary's .cmake files + // being shipped 
with it. This would be fine, except they have hardcoded paths. + // But a simple solution has been found. + const orig = await path.read(); + const relative_path = install_prefix.relative({ to: path.parent() }); + const text = orig.replaceAll( + install_prefix.string, + `\${CMAKE_CURRENT_LIST_DIR}/${relative_path}`, + ); + if (orig !== text) { + console.error("%c+", "color:yellow", "fixing:", path); + path.write(text); + } +} diff --git a/brewkit/fixups/fix-pkg-config-file.ts b/brewkit/fixups/fix-pkg-config-file.ts new file mode 100644 index 00000000..ad915ae1 --- /dev/null +++ b/brewkit/fixups/fix-pkg-config-file.ts @@ -0,0 +1,14 @@ +import Path from "../path.ts"; + +export async function fix_pkg_config_file(path: Path, install_prefix: Path) { + const orig = await path.read(); + const relative_path = install_prefix.relative({ to: path.parent() }); + const text = orig.replaceAll( + install_prefix.string, + `\${pcfiledir}/${relative_path}`, + ); + if (orig !== text) { + console.log("%c+", "color:yellow", "fixing:", path); + path.write(text); + } +} diff --git a/brewkit/fixups/fix-shebang.ts b/brewkit/fixups/fix-shebang.ts new file mode 100644 index 00000000..ca7d4026 --- /dev/null +++ b/brewkit/fixups/fix-shebang.ts @@ -0,0 +1,87 @@ +import { basename as std_basename } from "jsr:@std/path@^1"; +import Path from "../path.ts"; + +export default async function (path: Path, provided_programs: string[]) { + //FIXME this could be pretty damn efficient if we can find the time + //NOTE as it stands this is HIDEOUSLY inefficient + + const contents = await Deno.readFile(path.string); + const txt = new TextDecoder().decode(contents); + const [line0, ...lines] = txt.trimStart().split("\n"); //lol + + const match = line0.match(/^#!(\s*)(\/[^\s]+)/); + if (!match) { + console.error(`::error file=${path}::could not parse shebang`); + return; + } + let interpreter = match[2]; + + switch (interpreter) { + case "/bin/bash": + case "/bin/zsh": + case "/bin/sh": + if (match[1]) { + // fix leading spaces in shebang + break; + } else { + return; + } + case "/usr/bin/env": + interpreter = (line0.match(/env\s+(.*)/)?.[1] ?? 
"").split(/\s+/)[0]; + if (interpreter == "-S" || !interpreter) { + console.error(`::error file=${path}::${line0.trim()}`); + return; + } + break; + } + + const basename = std_basename(interpreter); + + const shebang = (() => { + if (provided_programs.includes(basename)) { + return `#!/usr/bin/env ${basename}`; + } + + switch (basename) { + case "sh": + case "bash": + case "zsh": + return `#!/bin/${basename}`; + } + + const match = basename.match(/python(\d(\.\d+)?)/); + if (match?.[1]) { + return `#!/usr/bin/env -S pkgx python~${match[1]}`; + } + + if (Deno.build.os == "darwin") { + switch (basename) { + case "perl": + case "ruby": + // these are part of the macOS system ∴ use them + return `#!/usr/bin/${basename}`; + } + } + + return `#!/usr/bin/env -S pkgx ${basename}`; + })(); + + if (!shebang) return; + + const rewrite = `${shebang}\n${lines.join("\n")}`; + + console.error( + "%c+", + "color:yellow", + "rewriting shebang:", + path, + "to", + shebang, + ); + + const stat = Deno.lstatSync(path.string); + const needs_chmod = stat.mode && !(stat.mode & 0o200); + if (needs_chmod) Deno.chmodSync(path.string, 0o666); + await Deno.writeFile(path.string, new TextEncoder().encode(rewrite)); + if (needs_chmod) Deno.chmodSync(path.string, stat.mode!); +} diff --git a/brewkit/fixups/library-symlink-ensurer.ts b/brewkit/fixups/library-symlink-ensurer.ts new file mode 100644 index 00000000..17f99987 --- /dev/null +++ b/brewkit/fixups/library-symlink-ensurer.ts @@ -0,0 +1,47 @@ +import { Path } from "brewkit"; + +export default class LibrarySymlinkEnsurer { + files: Path[] = []; + + push(file: Path) { + switch (Deno.build.os) { + case "linux": + if (file.basename().startsWith("lib") && /\.so(\.\d+)*$/.test(file.string)) { + this.files.push(file); + } + break; + case "darwin": + if (file.basename().startsWith("lib") && file.extname() == ".dylib") { + this.files.push(file); + } + break; + } + } + + execute() { + for (const file of this.files) { + switch (Deno.build.os) { + case "linux": + const match = file.basename().match(/(.*\.so)((\.\d+)\.\d+)\.\d+$/); + if (!match) { + console.error(`::warning file=${file}::library symlink should have version in name`); + continue; + } + + const base = match[1]; + symlink_if_needed(file, `${base}`); + symlink_if_needed(file, `${base}${match[2]}`); + symlink_if_needed(file, `${base}${match[3]}`); + break; + } + } + } +} + +function symlink_if_needed(file: Path, basename: string) { + //TODO check realpath + const newpath = file.parent().join(basename); + if (!newpath.isSymlink()) { + newpath.ln("s", { target: file.basename() }); + } +} diff --git a/brewkit/fixups/rehard-linker.ts b/brewkit/fixups/rehard-linker.ts new file mode 100644 index 00000000..2ddabfaa --- /dev/null +++ b/brewkit/fixups/rehard-linker.ts @@ -0,0 +1,37 @@ +import Path from "../path.ts"; + +export default class RehardLinker { + inos: Record = {}; + paths: Record = {}; + + push(path: Path) { + // zero is an impossible inode value + const ino = Deno.statSync(path.string)?.ino ?? 
0; + + if (!(ino in this.inos)) { + this.inos[ino] = path.string; + this.paths[path.string] = []; + return true; + } else { + this.paths[this.inos[ino]].push(path.string); + return false; + } + } + + async execute() { + for (const [oldpath, newpaths] of Object.entries(this.paths)) { + for (const newpath of newpaths) { + console.error( + "%c+", + "color:yellow", + "re-hardlinking", + oldpath, + "to", + newpath, + ); + await Deno.remove(newpath); + await Deno.link(oldpath, newpath); + } + } + } +} diff --git a/brewkit/fixups/rpath-fixer.ts b/brewkit/fixups/rpath-fixer.ts new file mode 100644 index 00000000..8252581e --- /dev/null +++ b/brewkit/fixups/rpath-fixer.ts @@ -0,0 +1,134 @@ +import { backticks, backticks_quiet, Path, run } from "brewkit"; +import { fromFileUrl } from "jsr:@std/path@^1.0.8"; + +export default class RpathFixer { + files: Path[] = []; + install_prefix: Path; + final_prefix: Path; + PKGX_DIR: Path; + deps_prefixes: Path[]; + + constructor(install_prefix: Path, final_prefix: Path, deps: Path[], PKGX_DIR: Path) { + this.deps_prefixes = deps; + this.install_prefix = install_prefix; + this.final_prefix = final_prefix; + this.PKGX_DIR = PKGX_DIR; + } + + push(path: Path) { + const { mode } = Deno.statSync(path.string); + if (!mode || !(mode & 0o200)) { + const newMode = (mode || 0) | 0o200; // chmod u+x + path.chmod(newMode); + } + if (Deno.build.os == "linux" && path.extname() == ".o") return; + this.files.push(path); + } + + async execute() { + switch (Deno.build.os) { + case "linux": + for (const file of this.files) { + await this.execute_linux(file); + } + break; + + case "darwin": { + const script = new Path(fromFileUrl(import.meta.url)).join( + "../../scripts/fix-macho.rb", + ); + + Deno.env.set("GEM_HOME", "/tmp/gem"); + + for (const file of this.files) { + Deno.env.set("LIBS", JSON.stringify(this.files)); + run`${script} ${file} ${this.install_prefix} ${this.final_prefix} ${this.PKGX_DIR}`; + } + } + } + } + + async execute_linux(file: Path) { + let rpaths: string[] = []; + + const LDLPATH = [ + this.final_prefix.join("lib"), + ...this.deps_prefixes.map((prefix) => prefix.join("lib")), + ].join(":"); + const old_LDLPATH = Deno.env.get("LD_LIBRARY_PATH"); + Deno.env.set("LD_LIBRARY_PATH", LDLPATH); + + for (const linked_lib of ldd(file)) { + console.error(file, linked_lib); + + let rpath = ""; + if (linked_lib.startsWith(this.final_prefix.string)) { + rpath = new Path(linked_lib).parent().relative({ + to: file.parent(), + }); + } else if (linked_lib.startsWith(this.install_prefix.string)) { + rpath = new Path(linked_lib).relative({ + to: this.install_prefix + }); + const foo = this.final_prefix.join(rpath); + rpath = new Path(foo).parent().relative({ + to: file.parent(), + }); + } else if (linked_lib.startsWith(this.PKGX_DIR.string)) { + let mangled_path = linked_lib.replace( + new RegExp("/v(\\d+)\\.(\\d+\\.)+\\d+[a-z]?/"), + (_, capture) => `/v${capture}/`, + ); + rpath = new Path(mangled_path).parent().relative({ + to: file.parent(), + }); + } else { + console.error(`::error::could not fix: ${linked_lib}`); + continue; + } + rpaths.push(rpath); + + if (/.so\d+(\.\d)+/.test(linked_lib)) { + // has linked to a version that is too strict + const basename = new Path(linked_lib).basename(); + const libname = basename.replace(/\.so.(\d+)(\.\d)+/, (_, capture) => `.so.${capture}`); + run`patchelf --replace-needed ${basename} ${libname} ${file}`; + } + } + + if (old_LDLPATH) { + Deno.env.set("LD_LIBRARY_PATH", old_LDLPATH); + } else { + Deno.env.delete("LD_LIBRARY_PATH"); 
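+ // LD_LIBRARY_PATH was widened above only so the ldd pass could resolve libraries + // from this package and its declared deps; restoring/clearing it here keeps the + // temporary value from leaking into the patchelf invocations below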
+ } + + rpaths = rpaths.uniq().map((rpath) => `$ORIGIN/${rpath || "."}`); + + if (rpaths.length) { + run`patchelf --set-rpath ${rpaths.join(":")} ${file}`; + } else { + run`patchelf --remove-rpath ${file}`; + } + } +} + +function ldd(file: Path): string[] { + const libs: string[] = []; + + const output = backticks_quiet`ldd ${file}`; + + for (const line of output.split("\n")) { + if (!line.includes("=>")) continue; + const [basename, fullpath] = line.trim().split(/\s+=>\s+/); + + if (fullpath.startsWith("/lib/")) continue; + if (/linux-vdso.so.\d+/.test(basename)) continue; + + //FIXME + if (/libstdc\+\+.so.\d+/.test(basename)) continue; + + const lib = new Path(fullpath.replace(/\s+\(0x[0-9a-f]+\)/, "")).realpath(); + libs.push(lib.string); + } + return libs; +} diff --git a/brewkit/fixups/utils.ts b/brewkit/fixups/utils.ts new file mode 100644 index 00000000..bac3d77a --- /dev/null +++ b/brewkit/fixups/utils.ts @@ -0,0 +1,18 @@ +import { Path } from "brewkit"; + +export async function mime(path: Path | string) { + using file = await Deno.open(path instanceof Path ? path.string : path); + const buff = new Uint8Array(4); + await file.read(buff); + + const check = (...bytes: number[]) => { + return bytes.every((byte, index) => buff[index] === byte); + }; + + // first is elf, second is macho + if (check(0x7F, 0x45, 0x4C, 0x46) || check(0xCF, 0xFA, 0xED, 0xFE)) { + return "exe"; + } else if (check(0x23, 0x21)) { + return "shebang"; + } +} diff --git a/brewkit/github.ts b/brewkit/github.ts new file mode 100644 index 00000000..72a1d123 --- /dev/null +++ b/brewkit/github.ts @@ -0,0 +1,87 @@ +import { Range, semver } from "./mod.ts"; + +// we have to fetch all releases and all tags since the GitHub API doesn't +// support filtering by semver ranges and there is no sensible other way to +// order the responses. 
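+// for example, a caller that wants the newest release satisfying a constraint has to +// pull every page and filter/sort client-side; an illustrative sketch only (not used +// by this module; "OWNER/REPO" is a placeholder): +// +// const candidates = (await releases("OWNER/REPO", constraint)) +// .flatMap((r) => std_version_covert(r) ?? []); +// candidates.sort((a, b) => semver.compare(a.version, b.version)); +// const newest = candidates.at(-1)?.version;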
+ +async function get_headers() { + const headers: Record<string, string> = { + "Accept": "application/vnd.github.v3+json", + }; + + const { state } = await Deno.permissions.query({ + name: "env", + variable: "GITHUB_TOKEN", + }); + + if (state == "granted" && Deno.env.get("GITHUB_TOKEN")) { + headers["Authorization"] = `bearer ${Deno.env.get("GITHUB_TOKEN")}`; + } + + return headers; +} + +export async function releases( + repo: string, + constraint: Range, +): Promise<{ tag_name: string; name: string }[]> { + const headers = await get_headers(); + + if (constraint.toString() == "*") { + const rsp: Response = await fetch( + `https://api.github.com/repos/${repo}/releases/latest`, + { headers }, + ); + if (!rsp.ok) { + console.error("%cerr:", "color: red", "you might need to set GITHUB_TOKEN"); + Deno.exit(1); + } + return [await rsp.json()]; + } + + let url: string | undefined = `https://api.github.com/repos/${repo}/releases?per_page=100`; + + const rv = []; + while (url) { + const rsp: Response = await fetch(url, { headers }); + if (!rsp.ok) { + console.error("%cerr:", "color: red", "you might need to set GITHUB_TOKEN"); + Deno.exit(1); + } + rv.push(...await rsp.json()); + url = rsp.headers.get("link")?.match(/<([^>]+)>;\s*rel="next"/)?.[1]; + } + return rv; +} + +export async function tags(repo: string): Promise<{ name: string }[]> { + let url: string | undefined = `https://api.github.com/repos/${repo}/tags?per_page=100`; + const headers = await get_headers(); + + const rv = []; + while (url) { + const rsp: Response = await fetch(url, { headers }); + if (!rsp.ok) { + console.error("%cerr:", "color: red", "you might need to set GITHUB_TOKEN"); + Deno.exit(1); + } + rv.push(...await rsp.json()); + url = rsp.headers.get("link")?.match(/<([^>]+)>;\s*rel="next"/)?.[1]; + } + return rv; +} + +export function std_version_covert( + { tag_name, name }: { tag_name?: string; name: string }, +) { + const tag = tag_name || name; // depends on if we fetched /releases or /tags + let parsable_tag = tag; + if (/^[a-zA-Z]+-/.test(tag)) { + // common to put the project name or something like it as the prefix + parsable_tag = tag.replace(/^[a-zA-Z]+-/, ""); + } + const version = semver.parse(parsable_tag); + if (version) { + return { version, tag }; + } +} diff --git a/brewkit/mod.ts b/brewkit/mod.ts new file mode 100644 index 00000000..b9386293 --- /dev/null +++ b/brewkit/mod.ts @@ -0,0 +1,184 @@ +import Path from "./path.ts"; +export { Path }; + +import { parse } from "https://deno.land/x/libpkgx@v0.20.3/src/utils/pkg.ts"; +export { parse }; + +import * as github from "./github.ts"; +export { github }; + +import run, { backticks, backticks_quiet } from "./run.ts"; +export { backticks, backticks_quiet, run }; + +import unarchive from "./unarchive.ts"; +export { unarchive }; + +import { PackageRequirement } from "https://deno.land/x/libpkgx@v0.20.3/src/types.ts"; +export type { PackageRequirement }; + +import * as test_utils from "./test-utils.ts"; +export { test_utils }; + +import SemVer, { + compare, + intersect, + parse as semver_parse, + Range, +} from "https://deno.land/x/libpkgx@v0.20.3/src/utils/semver.ts"; + +function intersects(a: Range, b: Range) { + try { + semver.intersect(a, b); + return true; + } catch { + return false; + } +} + +const semver = { intersect, intersects, compare, parse: semver_parse }; + +export { Range, SemVer, semver }; + +class InstallablePath extends Path { + constructor(path: Path) { + super(path); + } + + install(src: Path | string) { + src = src instanceof Path ? 
src : Path.cwd().join(src); + src.mv({ into: this.mkdir("p") }); + } +} + +class Prefix extends Path { + bin: InstallablePath; + lib: InstallablePath; + share: InstallablePath; + + constructor(prefix: Path) { + super(prefix); + this.bin = new InstallablePath(prefix.join("bin")); + this.lib = new InstallablePath(prefix.join("lib")); + this.share = new InstallablePath(prefix.join("share")); + } +} + +interface BuildOptions { + prefix: Prefix; + version: SemVer & { marketing: string }; + tag: string; + + /// the directory with your `package.yml` and any other files you added there + props: Path; + + // where your deps are installed and your built package will be moved after `build()` + PKGX_DIR: Path; + + deps: Record<string, Path>; +} + +interface TestOptions { + version: SemVer; + prefix: Path; +} + +export type { BuildOptions, TestOptions }; +export { Prefix }; + +type Falsy = false | 0 | "" | null | undefined; + +declare global { + interface Array<T> { + compact<S>(body?: (t: T) => S | Falsy, opts?: { rescue: boolean }): S[]; + uniq(): Array<T>; + } +} + +Array.prototype.compact = function <T, S>( + body?: (t: T) => S | Falsy, + opts?: { rescue: boolean }, ): S[] { + const rv: S[] = []; + for (const e of this) { + try { + const f = body ? body(e) : e; + if (f) rv.push(f); + } catch (err) { + if (opts === undefined || opts.rescue === false) throw err; + } + } + return rv; +}; + +Array.prototype.uniq = function <T>(): Array<T> { + const set = new Set(); + return this.compact((x) => { + const s = x.toString(); + if (set.has(s)) return; + set.add(s); + return x; + }); +}; + +export function flatmap<S, T>( + t: T | Falsy, + body: (t: T) => S | Falsy, + opts?: { rescue: boolean }, ): S | undefined; +export function flatmap<S, T>( + t: Promise<T | Falsy>, + body: (t: T) => Promise<S | Falsy>, + opts?: { rescue: boolean }, ): Promise<S | undefined>; +export function flatmap<S, T>( + t: Promise<T | Falsy> | (T | Falsy), + body: (t: T) => (S | Falsy) | Promise<S | Falsy>, + opts?: { rescue: boolean }, ): Promise<S | undefined> | (S | undefined) { + try { + if (t instanceof Promise) { + const foo = t.then((t) => { + if (!t) return; + const s = body(t) as Promise<S | Falsy>; + if (!s) return; + const bar = s.then((body) => body || undefined); + if (opts?.rescue) { + return bar.catch(() => { + return undefined; + }); + } else { + return bar; + } + }); + return foo; + } else { + if (t) return body(t) as (S | Falsy) || undefined; + } + } catch (err) { + if (!opts?.rescue) throw err; + } +} + +import fixup from "./fixup.ts"; +export { fixup }; + +export function inreplace(path: Path | string, from: string | RegExp, to: string) { + path = path instanceof Path ? 
path : Path.cwd().join(path); + console.error(`%c+`, "color:yellow", "inreplace", path); + Deno.writeTextFileSync( + path.string, + Deno.readTextFileSync(path.string).replaceAll(from, to), + ); +} + +import undent from "https://deno.land/x/outdent@v0.8.0/mod.ts"; +export { undent }; + +import env_include, { stub } from "./env-include.ts"; +export { env_include, stub }; + +import default_versions from "./default-versions.ts"; +export { default_versions }; diff --git a/brewkit/path.ts b/brewkit/path.ts new file mode 100644 index 00000000..08416c36 --- /dev/null +++ b/brewkit/path.ts @@ -0,0 +1,476 @@ +import { SEPARATOR as SEP } from "jsr:@std/path@^1"; +import { moveSync } from "jsr:@std/fs@1"; +import * as sys from "jsr:@std/path@^1"; + +// modeled after https://github.com/mxcl/Path.swift + +// everything is Sync because TypeScript will unfortunately not +// cascade `await`, meaning our chainable syntax would become: +// +// await (await foo).bar +// +// however we use async versions for “terminators”, eg. `ls()` + +//NOTE not considered good for general consumption on Windows at this time +// generally we try to workaround unix isms and there are some quirks + +export default class Path { + /// the normalized string representation of the underlying filesystem path + readonly string: string; + + /// the filesystem root + static root = new Path("/"); + + static cwd(): Path { + return new Path(Deno.cwd()); + } + + static home(): Path { + return new Path( + (() => { + switch (Deno.build.os) { + case "windows": + return Deno.env.get("USERPROFILE")!; + default: + return Deno.env.get("HOME")!; + } + })(), + ); + } + + /// normalizes the path + /// throws if not an absolute path + constructor(input: string | Path) { + if (input instanceof Path) { + this.string = input.string; + return; + } + + if (!input) { + throw new Error(`invalid absolute path: ${input}`); + } + + if (Deno.build.os == "windows") { + if (!input.match(/^[a-zA-Z]:/)) { + if (!input.startsWith("/") && !input.startsWith("\\")) { + throw new Error(`invalid absolute path: ${input}`); + } + if (!input.startsWith("\\\\")) { + // ^^ \\network\drive is valid path notation on windows + + //TODO shouldn’t be C: necessarily + // should it be based on PWD or system default drive? + // NOTE also: maybe we shouldn't do this anyway? + input = `C:\\${input}`; + } + } + input = input.replace(/\//g, "\\"); + } else if (input[0] != "/") { + throw new Error(`invalid absolute path: ${input}`); + } + + this.string = normalize(input); + + function normalize(path: string): string { + const segments = path.split(SEP); + const result = []; + + const start = Deno.build.os == "windows" ? (segments.shift() || "\\") + "\\" : "/"; + + for (const segment of segments) { + if (segment === "..") { + result.pop(); + } else if (segment !== "." && segment !== "") { + result.push(segment); + } + } + + return start + result.join(SEP); + } + } + + /// returns Path | undefined rather than throwing error if Path is not absolute + static abs(input: string | Path) { + try { + return new Path(input); + } catch { + return; + } + } + + fileURL(): URL { + let str = this.string; + if (Deno.build.os == "windows") { + str = str.replaceAll("\\", "/"); + } + return new URL(`file://${str}`); + } + + drive() { + if (Deno.build.os != "windows") { + return; + } else { + return this.string.match(/^[a-zA-Z]:/)?.[0]; + } + } + + /** + If the path represents an actual entry that is a symlink, returns the symlink’s + absolute destination. 
+ + - Important: This is not exhaustive, the resulting path may still contain a symlink. + - Important: The path will only be different if the last path component is a symlink, any symlinks in prior components are not resolved. + - Note: If file exists but isn’t a symlink, returns `self`. + - Note: If symlink destination does not exist, is **not** an error. + */ + readlink(): Path { + try { + const output = Deno.readLinkSync(this.string); + return this.parent().join(output); + } catch (err) { + if (err instanceof Error && "code" in err) { + const code = err.code; + switch (code) { + case "EINVAL": + return this; // is file + case "ENOENT": + throw err; // there is no symlink at this path + } + } + throw err; + } + } + /** + Returns the parent directory for this path. + Path is not aware of the nature of the underlying file, but this is + irrlevant since the operation is the same irrespective of this fact. + - Note: always returns a valid path, `Path.root.parent` *is* `Path.root`. + */ + parent(): Path { + return new Path(sys.dirname(this.string)); + } + + /// returns normalized absolute path string + toString(): string { + return this.string; + } + + /// joins this path with the provided component and normalizes it + /// if you provide an absolute path that path is returned + /// rationale: usually if you are trying to join an absolute path it is a bug in your code + /// TODO should warn tho + join(...components: string[]): Path { + const joined = components.filter((x) => x).join(SEP); + if (isAbsolute(joined)) { + return new Path(joined); + } else if (joined) { + return new Path(`${this.string}${SEP}${joined}`); + } else { + return this; + } + function isAbsolute(part: string) { + if ( + Deno.build.os == "windows" && + (part?.match(/^[a-zA-Z]:/) || part?.startsWith("\\\\")) + ) { + return true; + } else { + return part.startsWith("/"); + } + } + } + + /// Returns true if the path represents an actual filesystem entry that is *not* a directory. + /// NOTE we use `stat`, so if the file is a symlink it is resolved, usually this is what you want + isFile(): Path | undefined { + try { + return Deno.statSync(this.string).isFile ? this : undefined; + } catch { + return; //FIXME + // if (err instanceof Deno.errors.NotFound == false) { + // throw err + // } + } + } + + isSymlink(): Path | undefined { + try { + return Deno.lstatSync(this.string).isSymlink ? this : undefined; + } catch { + return; //FIXME + // if (err instanceof Deno.errors.NotFound) { + // return false + // } else { + // throw err + // } + } + } + + isExecutableFile(): Path | undefined { + try { + if (!this.isFile()) return; + const info = Deno.statSync(this.string); + if (!info.mode) throw new Error(); + const is_exe = (info.mode & 0o111) > 0; + if (is_exe) return this; + } catch { + return; //FIXME catch specific errors + } + } + + isReadableFile(): Path | undefined { + try { + if (Deno.build.os != "windows") { + const { mode, isFile } = Deno.statSync(this.string); + if (isFile && mode && mode & 0o400) { + return this; + } + } else { + //FIXME not particularly efficient lol + Deno.openSync(this.string).close(); + return this; + } + } catch { + return undefined; + } + } + + exists(): Path | undefined { + //FIXME can be more efficient + try { + Deno.lstatSync(this.string); + return this; + } catch { + return; //FIXME + // if (err instanceof Deno.errors.NotFound) { + // return false + // } else { + // throw err + // } + } + } + + /// Returns true if the path represents an actual directory. 
+ /// NOTE we use `stat`, so if the file is a symlink it is resolved, usually this is what you want + isDirectory(): Path | undefined { + try { + return Deno.statSync(this.string).isDirectory ? this : undefined; + } catch { + return; //FIXME catch specific errorrs + } + } + + async *ls(): AsyncIterable<[Path, Deno.DirEntry]> { + for await (const entry of Deno.readDir(this.string)) { + yield [this.join(entry.name), entry]; + } + } + + components(): string[] { + return this.string.split(SEP); + } + + split(): [Path, string] { + const d = this.parent(); + const b = this.basename(); + return [d, b]; + } + + /// the file extension with the leading period + extname(): string { + const match = this.string.match(/\.tar\.\w+$/); + if (match) { + return match[0]; + } else { + return sys.extname(this.string); + } + } + + basename(): string { + return sys.basename(this.string); + } + + /** + Moves a file. + + Path.root.join("bar").mv({to: Path.home.join("foo")}) + // => Path("/Users/mxcl/foo") + + - Parameter to: Destination filename. + - Parameter into: Destination directory (you get `into/${this.basename()`) + - Parameter overwrite: If true overwrites any entry that already exists at the destination. + - Returns: `to` to allow chaining. + - Note: `force` will still throw if `to` is a directory. + - Note: Throws if `overwrite` is `false` yet `to` is *already* identical to + `self` because even though *our policy* is to noop if the desired + end result preexists, checking for this condition is too expensive a + trade-off. + */ + mv( + { force, ...opts }: { to: Path; force?: boolean } | { + into: Path; + force?: boolean; + }, + ): Path { + if ("to" in opts) { + moveSync(this.string, opts.to.string, { overwrite: force }); + return opts.to; + } else { + const dst = opts.into.join(this.basename()); + moveSync(this.string, dst.string, { overwrite: force }); + return dst; + } + } + + // copy a thing, if it’s a directory we assume you want recursive because like what + // else did you want? + //FIXME operates in ”force” mode + cp(opts: { into: Path } | { to: Path }): Path { + const dst = "into" in opts ? opts.into.join(this.basename()) : opts.to; + if (this.isDirectory()) { + copyRecursive(this.string, dst.string); + } else { + Deno.copyFileSync(this.string, dst.string); + } + return dst; + } + + rm(opts?: "rf") { + if (this.exists()) { + try { + Deno.removeSync(this.string, { recursive: opts == "rf" }); + } catch (err) { + if (this.exists()) { + throw err; + } else { + // this is what we wanted, so noop + } + } + } + return this; // may seem weird but I've had cases where I wanted to chain + } + + mkdir(opts?: "p"): Path { + if (!this.isDirectory()) { + Deno.mkdirSync(this.string, { recursive: opts == "p" }); + } + return this; + } + + isEmpty(): Path | undefined { + for (const _ of Deno.readDirSync(this.string)) { + return; + } + return this; + } + + eq(that: Path): boolean { + return this.string == that.string; + } + + neq(that: Path): boolean { + return this.string != that.string; + } + + /// creates a new symlink pointing to this + /// to create a relative path symlink pass the relative string as `target` + ln(_: "s", opts: { target: Path | string }): Path { + const oldfile = opts.target instanceof Path ? 
opts.target.string : opts.target; + Deno.symlinkSync(oldfile, this.string); + return this; + } + + read(): Promise { + return Deno.readTextFile(this.string); + } + + write(contents: string): Path { + Deno.writeTextFileSync(this.string, contents); + return this; + } + + static mktemp(opts?: "d"): Path { + if (opts == "d") { + return new Path(Deno.makeTempDirSync()); + } else { + return new Path(Deno.makeTempFileSync()); + } + } + + touch(): Path { + try { + const date = new Date(); + Deno.utimeSync(this.string, date, date); + } catch { + Deno.openSync(this.string, { create: true, append: true }).close(); + } + return this; + } + + chmod(mode: number): Path { + if (Deno.build.os != "windows") { + Deno.chmodSync(this.string, mode); + } + return this; + } + + chuzzle(): Path | undefined { + if (this.exists()) return this; + } + + relative({ to: base }: { to: Path }): string { + const pathComps = this.string.split(SEP); + const baseComps = base.string.split(SEP); + + if (Deno.build.os == "windows") { + if (pathComps[0] != baseComps[0]) { + throw new Error( + "can't compute relative path between paths on different drives", + ); + } + } + + pathComps[0] = SEP; + baseComps[0] = SEP; + + if (this.string.startsWith(base.string)) { + return pathComps.slice(baseComps.length).join(SEP); + } else { + const newPathComps = [...pathComps]; + const newBaseComps = [...baseComps]; + + while (newPathComps[0] == newBaseComps[0]) { + newPathComps.shift(); + newBaseComps.shift(); + } + + // for some reason with this algo if they are the same directory we still get a single item at the end + const relComps = Array.from({ length: newBaseComps.length }, () => ".."); + relComps.push(...newPathComps); + return relComps.join(SEP); + } + } + + realpath(): Path { + return new Path(Deno.realPathSync(this.string)); + } + + [Symbol.for("Deno.customInspect")]() { + return this.string; + } +} + +function copyRecursive(src: string, dest: string) { + Deno.mkdirSync(dest, { recursive: true }); + + for (const entry of Deno.readDirSync(src)) { + const srcPath = `${src}/${entry.name}`; + const destPath = `${dest}/${entry.name}`; + + if (entry.isDirectory) { + copyRecursive(srcPath, destPath); + } else if (entry.isFile) { + Deno.copyFileSync(srcPath, destPath); + } + } +} diff --git a/brewkit/run.ts b/brewkit/run.ts new file mode 100644 index 00000000..24f88a5c --- /dev/null +++ b/brewkit/run.ts @@ -0,0 +1,89 @@ +function run_f(args: string[]) { + if (args.length == 1) { + args = splitArgs(args[0].trim()); + } + const cmd = args.shift()!; + + console.error("%c+", "color:yellow", cmd, ...args); + + const { success, code } = new Deno.Command(cmd, { + args, + stdout: "inherit", + stderr: "inherit", + }).outputSync(); + + if (!success) { + console.error(`%c${cmd} error`, "color:red", `(${code})`); + Deno.exit(code); + } +} + +export default function run(strings: TemplateStringsArray, ...values: any[]) { + const s = String.raw(strings, ...values); + return run_f([s]); +} + +export async function backticks( + strings: TemplateStringsArray, + ...values: any[] +) { + const cmdln = String.raw(strings, ...values); + const [cmd, ...args] = splitArgs(cmdln.trim()); + + console.error("%c+", "color:yellow", cmd, ...args); + + const proc = new Deno.Command(cmd, { args, stdout: "piped" }).spawn(); + + let rv = ""; + const decoder = new TextDecoder(); + for await (const chunk of proc.stdout) { + const decoded_chunk = decoder.decode(chunk); + console.log(decoded_chunk); + rv += decoded_chunk; + } + + const { success, code } = await proc.status; + + 
if (!success) { + console.error(`%c${cmd} error`, "color:red", `(${code})`); + Deno.exit(code); + } + + return rv.trimEnd(); +} + +export function backticks_quiet( + strings: TemplateStringsArray, + ...values: any[] +) { + const cmdln = String.raw(strings, ...values); + const [cmd, ...args] = cmdln.trim().split(/\s+/); + + const { success, stdout, code } = new Deno.Command(cmd, { + args, + stdout: "piped", + stderr: "inherit", + }).outputSync(); + + if (!success) { + console.error(`%c${cmd} error`, "color:red", `(${code})`); + Deno.exit(code); + } + + return new TextDecoder().decode(stdout); +} + +function splitArgs(input: string): string[] { + const regex = /"([^"]*)"|'([^']*)'|(\S+)/g; + const args: string[] = []; + let match; + + // Remove comments + input = input.replace(/#.*$/gm, ""); + + while ((match = regex.exec(input)) !== null) { + args.push(match[1] || match[2] || match[3]); + } + + return args; +} diff --git a/brewkit/scripts/convert-package.yml.ts b/brewkit/scripts/convert-package.yml.ts new file mode 100755 index 00000000..3f051e4e --- /dev/null +++ b/brewkit/scripts/convert-package.yml.ts @@ -0,0 +1,53 @@ +#!/usr/bin/env -S pkgx -q deno^2 --allow-read --allow-write + +import { parse, stringify } from "jsr:@std/yaml@^1"; + +const yaml = convert(Deno.args[0]); +const data = stringify(yaml); +console.log(data); + +function convert(path: string) { + const data = Deno.readTextFileSync(path); + const yaml = parse(data) as any; + const rv: Record = {}; + if (yaml.name) rv["display-name"] = yaml.name; + if (yaml.dependencies) { + rv.dependencies = yaml.dependencies; + } + if (yaml.linux?.dependencies) { + rv.dependencies ??= {}; + rv.dependencies.linux = yaml.linux.dependencies; + } + if (yaml.darwin?.dependencies) { + rv.dependencies ??= {}; + rv.dependencies.darwin = yaml.darwin.dependencies; + } + if (yaml.companions) { + rv.companions = {}; + for (const project of yaml.companions) { + rv.companions[project] = "*"; + } + } + if (yaml.linux?.companions) { + rv.companions ??= {}; + rv.companions.linux = {}; + for (const companion of yaml.linux.companions) { + rv.companions.linux[companion] = "*"; + } + } + if (yaml.darwin?.companions) { + rv.companions ??= {}; + rv.companions.darwin = {}; + for (const companion of yaml.darwin.companions) { + rv.companions.darwin[companion] = "*"; + } + } + if (yaml.env) { + rv.runtime = { env: yaml.env }; + } + if (yaml.programs) { + rv.provides = yaml.programs; + } + + return rv; +} diff --git a/brewkit/scripts/deps-env.ts b/brewkit/scripts/deps-env.ts new file mode 100755 index 00000000..366fae70 --- /dev/null +++ b/brewkit/scripts/deps-env.ts @@ -0,0 +1,85 @@ +#!/usr/bin/env -S deno run --allow-read --allow-env --allow-run + +import { utils, PackageRequirement } from "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/mod.ts"; +import * as yaml from "jsr:@std/yaml@^1/parse"; +import { Path } from "brewkit"; +import { fromFileUrl } from "jsr:@std/path@1/from-file-url"; + +export default async function resolveDependencies(pkgspec: string) { + const pkg = utils.pkg.parse(pkgspec); + const { deps } = get_data(pkg); + + if (deps.length === 0) return; + + const plus = deps.map((dep) => { + let str = utils.pkg.str(dep); + if (str.endsWith("*")) str = str.slice(0, -1); // pkgx^2 bug + return `+${str}`; + }); + + const proc = new Deno.Command("pkgx", { + args: ["--json=v1", "--quiet", ...plus], + stdout: "piped", + }).spawn(); + + const { success } = await proc.status; + if (!success) { + throw new Error("Fetching dependencies failed!"); 
+ } + + const stdout = new TextDecoder().decode((await proc.output()).stdout); + return stdout ? JSON.parse(stdout) : undefined; +} + +function parseAndExportEnv(json: any) { + for (const [key, paths] of Object.entries(json.env) as [string, string[]][]) { + console.log(`export ${key}="${paths.join(":")}\${${key}:+:\$${key}}"`); + } + + for (const [_pkg, env_dict] of Object.entries(json.runtime_env)) { + for (let [key, value] of Object.entries(env_dict as any)) { + const { path } = json.pkgs.find(({ project }: any) => project === _pkg); + value = (value as string).replace("{{prefix}}", path); + console.log(`export ${key}="${value}"`); + } + } + + const pkgs = Object.values(json.pkgs).map(({ path }: any) => path); + console.log(`export PKGS=${pkgs.join(":")}`); +} + +interface YAMLType { + dependencies?: string | string[] | Record<string, string>; + windows?: { dependencies?: string | string[] | Record<string, string> }; + darwin?: { dependencies?: string | string[] | Record<string, string> }; + linux?: { dependencies?: string | string[] | Record<string, string> }; +} + +function get_data(pkg: PackageRequirement) { + const yamlfile = fromFileUrl(new URL(`../../projects/${pkg.project}/package.yml`, import.meta.url)); + const obj = yaml.parse(Deno.readTextFileSync(yamlfile)) as YAMLType | undefined; + const deps: PackageRequirement[] = []; + const os = Deno.build.os as "windows" | "darwin" | "linux"; + + deps.push(...parse_node(obj?.dependencies)); + deps.push(...parse_node(obj?.[os]?.dependencies)); + + return { deps }; +} + +function parse_node(node: unknown) { + if (typeof node === "string") { + return node.split(/\s+/).map(utils.pkg.parse); + } else if (Array.isArray(node)) { + return node.map(utils.pkg.parse); + } else if (node) { + return Object.entries(node).map(([name, constraint]) => utils.pkg.parse(`${name}${constraint}`)); + } else { + return []; + } +} + +if (import.meta.main) { + const json = await resolveDependencies(Deno.args[0]); + if (json) parseAndExportEnv(json); +} diff --git a/brewkit/scripts/fix-macho.rb b/brewkit/scripts/fix-macho.rb new file mode 100755 index 00000000..99ee6624 --- /dev/null +++ b/brewkit/scripts/fix-macho.rb @@ -0,0 +1,116 @@ +#!/usr/bin/ruby + +require 'bundler/inline' + +gemfile do + source 'https://rubygems.org' + gem 'ruby-macho', '~> 3' +end + +require 'macho' +require 'json' +require 'ostruct' +require 'open3' + +def main + $file = MachO.open(ARGV.shift) + + $prefix = ARGV.shift + $final_prefix = ARGV.shift + $PKGX_DIR = ARGV.shift + $LIBS = JSON.parse(ENV['LIBS']); + + case $file.filetype + when :dylib + fix_id + fix_install_names + assign_rpath + when :execute + fix_install_names + assign_rpath + when :bundle + fix_install_names + assign_rpath + when :object + when :dsym + # noop + else + raise "unknown filetype: #{$file.filetype}: #{$file.filename}" + end + + $file.write! + codesign! +end + +def fix_id + # we make the id the major version to ensure things that link to this dylib + # do not hardcode the path to this specific library thus breaking things on upgrades + rel_path = Pathname.new($file.filename).relative_path_from(Pathname.new($PKGX_DIR)) + rel_path = rel_path.sub(%r{/v(\d+)\.(\d+\.)+\d+[a-z]?/}, '/v\1/') # use the pkgx `v` prefixed symlink + rel_path = rel_path.sub(%r{\.(\d+)\.(\d+\.)+\d+\.dylib$}, '.\1.dylib') # use the dylib symlink + id = "@rpath/#{rel_path}" + $file.change_dylib_id id +end + +def assign_rpath + $file.rpaths.each do |rpath| + $file.delete_rpath rpath + end + + libs = $file.linked_dylibs.select do |lib| + lib.start_with? '@rpath/' + end + + unless libs.empty? 
+ prefix = $file.filetype == :execute ? "@executable_path" : "@loader_path" + rel_path = Pathname.new($PKGX_DIR).relative_path_from(Pathname.new($file.filename).parent) + $file.add_rpath "#{prefix}/#{rel_path}" + end +end + +def fix_install_names + $file.linked_dylibs.map do |lib| + next if lib.start_with? '/usr/' or lib.start_with? '/System/' or lib.start_with? '/Library/' + + og_lib_name = lib + + if lib.start_with? '@rpath' + file = $LIBS.find{ |dep| File.basename(dep['string']) == File.basename(lib) } + if file + lib = file['string'] + else + puts "::warning file=#{$file.filename}::missing dependency: #{lib}" + next + end + end + + if lib.start_with? $prefix + rel_path = Pathname.new($file.filename).relative_path_from($final_prefix) + rel_path = Pathname.new($prefix).join(rel_path) + rel_path = Pathname.new(lib).relative_path_from(rel_path.parent) + if $file.filetype == :execute + $file.change_install_name og_lib_name, "@executable_path/#{rel_path}" + else + $file.change_install_name og_lib_name, "@loader_path/#{rel_path}" + end + elsif !lib.start_with? $PKGX_DIR + puts "::error file=#{$file.filename}::unexpected install_name path: #{lib}" + else + rel_path = Pathname.new(lib).relative_path_from($PKGX_DIR) + rel_path = rel_path.sub(%r{/v(\d+)\.(\d+\.)+\d+[a-z]?/}, '/v\1/') # use the pkgx `v` prefixed symlink + rel_path = rel_path.sub(%r{\.(\d+)\.(\d+\.)+\d+\.dylib$}, '.\1.dylib') # use the dylib symlink + $file.change_install_name og_lib_name, "@rpath/#{rel_path}" + end + end +end + +def codesign! + signing_id = ENV['APPLE_IDENTITY'] || "-" + + _, _, status = Open3.capture3("codesign", "--sign", signing_id, "--force", + "--preserve-metadata=entitlements,requirements,flags,runtime", + $file.filename) + + raise MachO::CodeSigningError, "#{$file.filename}: signing failed!" unless status.success? 
+end + +main diff --git a/brewkit/test-utils.ts b/brewkit/test-utils.ts new file mode 100644 index 00000000..141ab919 --- /dev/null +++ b/brewkit/test-utils.ts @@ -0,0 +1,39 @@ +import { assert } from "jsr:@std/assert@^1"; +import { Path } from "brewkit"; + +export async function getstderr(cmdln: string) { + const [cmd, ...args] = cmdln.split(/\s+/); + const proc = new Deno.Command(cmd, { args, stderr: "piped" }).spawn(); + const out = await proc.output(); + return new TextDecoder().decode(out.stderr).trim(); +} + +export async function getstdout(cmdln: string) { + const [cmd, ...args] = cmdln.split(/\s+/); + const proc = new Deno.Command(cmd, { args, stdout: "piped" }).spawn(); + const out = await proc.output(); + return new TextDecoder().decode(out.stdout).trim(); +} + +export async function asset_stderr(cmdln: string, expected: string) { + const stderr = await getstderr(cmdln); + assert(stderr == expected, `stderr=\`${stderr}\`, expected=\`${expected}\``); +} + +export async function asset_stdout(cmdln: string, expected: string) { + const stdout = await getstdout(cmdln); + assert(stdout == expected, `stdout=\`${stdout}\`, expected=\`${expected}\``); +} + +export async function tmp<T>(fn: (d: Path) => Promise<T>): Promise<T> { + //TODO actually base off of original CWD + const tmp = Path.cwd().join(Math.random().toString(36).substring(2, 15)) + .mkdir(); + try { + Deno.chdir(tmp.string); + return await fn(tmp); + } finally { + tmp.rm("rf"); + Deno.chdir(tmp.parent().string); + } +} diff --git a/brewkit/toolchain/darwin/autoconf b/brewkit/toolchain/darwin/autoconf new file mode 120000 index 00000000..e3c5b888 --- /dev/null +++ b/brewkit/toolchain/darwin/autoconf @@ -0,0 +1 @@ +../linux/autoconf \ No newline at end of file diff --git a/brewkit/toolchain/darwin/automake b/brewkit/toolchain/darwin/automake new file mode 120000 index 00000000..99b37f98 --- /dev/null +++ b/brewkit/toolchain/darwin/automake @@ -0,0 +1 @@ +../linux/automake \ No newline at end of file diff --git a/brewkit/toolchain/darwin/c++ b/brewkit/toolchain/darwin/c++ new file mode 120000 index 00000000..2652f5f4 --- /dev/null +++ b/brewkit/toolchain/darwin/c++ @@ -0,0 +1 @@ +cc \ No newline at end of file diff --git a/brewkit/toolchain/darwin/cc b/brewkit/toolchain/darwin/cc new file mode 100755 index 00000000..71291119 --- /dev/null +++ b/brewkit/toolchain/darwin/cc @@ -0,0 +1,83 @@ +#!/bin/sh +# +# intelligently inject our rpath into the arguments for build tools +# this is more reliable than setting LDFLAGS which often can cause other weird breakage +# without our rpath things don’t link correctly, which is annoying but seems +# to be because Open Source doesn’t typically use rpath stuff and we do for relocatability +# reasons + +tool=$(basename "$0") +export _PKGX_DIR="$(cd "$(dirname "$0")/.." && pwd)/deps" + +case $tool in +cc|gcc|clang|c++|g++|clang++) + exec ruby "$0" "$@" + ;; +ld|lld|lld-link|ld64.lld) + for word in "$@"; do + if test "$word" = -r; then + exec /usr/bin/"$tool" "$@" + fi + done + + exec /usr/bin/"$tool" "$@" -rpath "$_PKGX_DIR" + ;; +*) + exit 2 + ;; +esac + + +#!/usr/bin/ruby + +$pkgx_dir = ENV['_PKGX_DIR'] +exe = File.basename($0) + +# remove duplicates since this in fact embeds the rpath multiple times +# and omit -nodefaultrpaths since it is not a valid flag for clang +args = ARGV.map do |arg| + arg unless arg == "-Wl,-rpath,#$pkgx_dir" or arg == "-nodefaultrpaths" +end.compact + +def is_pkgx? path + path = File.realpath path while File.symlink? 
path + return File.basename(path) == "pkgx" +end + +# find next example of ourselves +# this will either pick the Apple provided clang or the pkgx one +exe_path = ENV['PATH'].split(":").filter { |path| + if path == File.dirname(__FILE__) + false + elsif path == File.join($pkgx_dir, ".local/bin") + false + elsif is_pkgx?(path) + false + else + true + end +}.map { |path| + "#{path}/#{exe}" +}.find { |path| + File.exist?(path) +} + +abort "couldn’t find #{exe} in `PATH`" unless exe_path + +args = args.filter do |arg| + arg != '-Werror' +end + +for arg in args do + # figuring out what “mode” we are operating in is hard + # we don’t want to add this linker command always because it causes a warning to be + # output if we are not outputing executables/dylibs and this warning can break + # configure scripts, however the below is not fully encompassing + # we aren't sure what the rules are TBH, possibly it is as simple as if the output (`-o`) + # is a .o then we don’t add the rpath + if arg.start_with? '-l' or arg.end_with? '.dylib' + exec exe_path, *args, "-Wl,-rpath,#$pkgx_dir" + end +end + +exec exe_path, *args diff --git a/brewkit/toolchain/darwin/clang b/brewkit/toolchain/darwin/clang new file mode 120000 index 00000000..2652f5f4 --- /dev/null +++ b/brewkit/toolchain/darwin/clang @@ -0,0 +1 @@ +cc \ No newline at end of file diff --git a/brewkit/toolchain/darwin/clang++ b/brewkit/toolchain/darwin/clang++ new file mode 120000 index 00000000..2652f5f4 --- /dev/null +++ b/brewkit/toolchain/darwin/clang++ @@ -0,0 +1 @@ +cc \ No newline at end of file diff --git a/brewkit/toolchain/darwin/g++ b/brewkit/toolchain/darwin/g++ new file mode 120000 index 00000000..2652f5f4 --- /dev/null +++ b/brewkit/toolchain/darwin/g++ @@ -0,0 +1 @@ +cc \ No newline at end of file diff --git a/brewkit/toolchain/darwin/gcc b/brewkit/toolchain/darwin/gcc new file mode 120000 index 00000000..2652f5f4 --- /dev/null +++ b/brewkit/toolchain/darwin/gcc @@ -0,0 +1 @@ +cc \ No newline at end of file diff --git a/brewkit/toolchain/darwin/ld b/brewkit/toolchain/darwin/ld new file mode 120000 index 00000000..2652f5f4 --- /dev/null +++ b/brewkit/toolchain/darwin/ld @@ -0,0 +1 @@ +cc \ No newline at end of file diff --git a/brewkit/toolchain/darwin/msgfmt b/brewkit/toolchain/darwin/msgfmt new file mode 120000 index 00000000..42825bb0 --- /dev/null +++ b/brewkit/toolchain/darwin/msgfmt @@ -0,0 +1 @@ +../linux/msgfmt \ No newline at end of file diff --git a/brewkit/toolchain/darwin/pkg-config b/brewkit/toolchain/darwin/pkg-config new file mode 120000 index 00000000..29fd4be1 --- /dev/null +++ b/brewkit/toolchain/darwin/pkg-config @@ -0,0 +1 @@ +../linux/pkg-config \ No newline at end of file diff --git a/brewkit/toolchain/linux/autoconf b/brewkit/toolchain/linux/autoconf new file mode 100755 index 00000000..4ffc06d3 --- /dev/null +++ b/brewkit/toolchain/linux/autoconf @@ -0,0 +1,2 @@ +#!/bin/sh +exec pkgx +gnu.org/autoconf -- autoconf "$@" diff --git a/brewkit/toolchain/linux/automake b/brewkit/toolchain/linux/automake new file mode 100755 index 00000000..b4f53c68 --- /dev/null +++ b/brewkit/toolchain/linux/automake @@ -0,0 +1,2 @@ +#!/bin/sh +exec pkgx +gnu.org/automake -- automake "$@" diff --git a/brewkit/toolchain/linux/bzip2 b/brewkit/toolchain/linux/bzip2 new file mode 100755 index 00000000..6715211d --- /dev/null +++ b/brewkit/toolchain/linux/bzip2 @@ -0,0 +1,2 @@ +#!/bin/sh +exec /usr/local/bin/pkgx bzip2 "$@" diff --git a/brewkit/toolchain/linux/curl b/brewkit/toolchain/linux/curl new file mode 100755 index 
00000000..e693b069 --- /dev/null +++ b/brewkit/toolchain/linux/curl @@ -0,0 +1,2 @@ +#!/bin/sh +exec /usr/local/bin/pkgx curl "$@" diff --git a/brewkit/toolchain/linux/libtool b/brewkit/toolchain/linux/libtool new file mode 100755 index 00000000..32c6e747 --- /dev/null +++ b/brewkit/toolchain/linux/libtool @@ -0,0 +1,2 @@ +#!/bin/sh +exec "$pkgx" +gnu.org/libtool -- libtool "$@" diff --git a/brewkit/toolchain/linux/make b/brewkit/toolchain/linux/make new file mode 100755 index 00000000..fbf015a6 --- /dev/null +++ b/brewkit/toolchain/linux/make @@ -0,0 +1,2 @@ +#!/bin/sh +exec /usr/local/bin/pkgx +gnu.org/make -- make "$@" diff --git a/brewkit/toolchain/linux/msgfmt b/brewkit/toolchain/linux/msgfmt new file mode 100755 index 00000000..9ed09778 --- /dev/null +++ b/brewkit/toolchain/linux/msgfmt @@ -0,0 +1,2 @@ +#!/bin/sh +exec pkgx msgfmt "$@" diff --git a/brewkit/toolchain/linux/patch b/brewkit/toolchain/linux/patch new file mode 100755 index 00000000..5b0edcf6 --- /dev/null +++ b/brewkit/toolchain/linux/patch @@ -0,0 +1,2 @@ +#!/bin/sh +exec /usr/local/bin/pkgx +gnu.org/patch -- patch "$@" diff --git a/brewkit/toolchain/linux/patchelf b/brewkit/toolchain/linux/patchelf new file mode 100755 index 00000000..05c5b4f7 --- /dev/null +++ b/brewkit/toolchain/linux/patchelf @@ -0,0 +1,2 @@ +#!/bin/sh +exec /usr/local/bin/pkgx patchelf "$@" diff --git a/brewkit/toolchain/linux/perl b/brewkit/toolchain/linux/perl new file mode 100755 index 00000000..1b5c8fde --- /dev/null +++ b/brewkit/toolchain/linux/perl @@ -0,0 +1,2 @@ +#!/bin/sh +exec /usr/local/bin/pkgx perl "$@" diff --git a/brewkit/toolchain/linux/pkg-config b/brewkit/toolchain/linux/pkg-config new file mode 100755 index 00000000..638bec02 --- /dev/null +++ b/brewkit/toolchain/linux/pkg-config @@ -0,0 +1,2 @@ +#!/bin/sh +exec pkgx pkg-config "$@" diff --git a/brewkit/toolchain/linux/strip b/brewkit/toolchain/linux/strip new file mode 100755 index 00000000..365b9efe --- /dev/null +++ b/brewkit/toolchain/linux/strip @@ -0,0 +1,2 @@ +#!/bin/sh +exec /usr/local/bin/pkgx +llvm.org -- strip "$@" diff --git a/brewkit/toolchain/linux/unzip b/brewkit/toolchain/linux/unzip new file mode 100755 index 00000000..1eff334c --- /dev/null +++ b/brewkit/toolchain/linux/unzip @@ -0,0 +1,2 @@ +#!/bin/sh +exec /usr/local/bin/pkgx unzip "$@" diff --git a/brewkit/toolchain/linux/xz b/brewkit/toolchain/linux/xz new file mode 100755 index 00000000..a151d724 --- /dev/null +++ b/brewkit/toolchain/linux/xz @@ -0,0 +1,2 @@ +#!/bin/sh +exec /usr/local/bin/pkgx +tukaani.org/xz -- xz "$@" diff --git a/brewkit/unarchive.ts b/brewkit/unarchive.ts new file mode 100644 index 00000000..1982e17e --- /dev/null +++ b/brewkit/unarchive.ts @@ -0,0 +1,72 @@ +import { Path } from "brewkit"; +import { fromFileUrl } from "jsr:@std/path@1/from-file-url"; +import { basename } from "node:path"; + +export default async function ( + url: string, + stripComponents = 1, +): Promise { + console.error("%c+", "color:yellow", "unarchiving:", url); + + if (url.endsWith(".zip")) { + const tmp = new Path(Deno.makeTempFileSync()); + const rsp = await fetch(url); + if (!rsp.ok) { + console.error("%cerror", "color:red", "failed to download:", url); + Deno.exit(1); + } + using file = await Deno.open(tmp.string, { write: true, create: true }); + await rsp.body!.pipeTo(file.writable); + + await new Deno.Command("unzip", { + args: [tmp.string], + stdin: "piped", + }).spawn().status; + } else { + const mode = url.endsWith(".bz2") ? "xjf" : url.endsWith(".xz") ? 
"xJf" : "xzf"; + const tar = new Deno.Command("tar", { + args: [mode, "-", `--strip-components=${stripComponents}`], + stdin: "piped", + }).spawn(); + + const input = await (async () => { + const root = new Path(fromFileUrl(import.meta.url)).join("../../srcs"); + const base = new Path(new URL(url).pathname).basename(); + const predownloaded_file = root.join(base).isReadableFile(); + + if (!predownloaded_file) { + const rsp: Response = await fetch(url); + if (!rsp.ok) { + console.error("::error::failed to download:", url); + Deno.exit(1); + } + if (!Deno.env.get("CI")) { + using file = await Deno.open(root.mkdir().join(basename(url)).string, { write: true, create: true }); + const [body1, body2] = rsp.body!.tee(); + await body1.pipeTo(file.writable); + return body2; + } else { + return rsp.body!; + } + } else { + console.error( + "%cℹ︎", + "color:blue", + "using pre-download:", + predownloaded_file.relative({ to: root }), + ); + const file = await Deno.open(predownloaded_file.string, { read: true }); + return file.readable; + } + })(); + + const [_, { success }] = await Promise.all([ + input.pipeTo(tar.stdin), + tar.status, + ]); + if (!success) { + console.error("%cerror", "color:red", "download and/or unarchive failed"); + Deno.exit(1); + } + } +} diff --git a/deno.jsonc b/deno.jsonc new file mode 100644 index 00000000..10077f46 --- /dev/null +++ b/deno.jsonc @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "strict": true + }, + "pkgx": "deno~2.1", + "imports": { + "brewkit": "./brewkit/mod.ts" + }, + "fmt": { + "exclude": [ + "builds", + "artifacts", + "**/*.md" // we actually would like this but we want 80 columns for + // readability and you can’t have one setting for ts and one + // for md + ], + "lineWidth": 120 + } +} diff --git a/deno.lock b/deno.lock new file mode 100644 index 00000000..0a99a497 --- /dev/null +++ b/deno.lock @@ -0,0 +1,1087 @@ +{ + "version": "4", + "specifiers": { + "jsr:@std/assert@1": "1.0.11", + "jsr:@std/bytes@^1.0.5": "1.0.5", + "jsr:@std/crypto@1": "1.0.3", + "jsr:@std/encoding@1": "1.0.5", + "jsr:@std/fs@1": "1.0.11", + "jsr:@std/internal@^1.0.5": "1.0.5", + "jsr:@std/io@0.225": "0.225.2", + "jsr:@std/path@1": "1.0.8", + "jsr:@std/path@^1.0.8": "1.0.8", + "jsr:@std/yaml@1": "1.0.5", + "npm:@aws-sdk/client-s3@3.743.0": "3.743.0", + "npm:@types/node@*": "22.5.4" + }, + "jsr": { + "@std/assert@1.0.11": { + "integrity": "2461ef3c368fe88bc60e186e7744a93112f16fd110022e113a0849e94d1c83c1", + "dependencies": [ + "jsr:@std/internal" + ] + }, + "@std/bytes@1.0.5": { + "integrity": "4465dd739d7963d964c809202ebea6d5c6b8e3829ef25c6a224290fbb8a1021e" + }, + "@std/crypto@1.0.3": { + "integrity": "a2a32f51ddef632d299e3879cd027c630dcd4d1d9a5285d6e6788072f4e51e7f" + }, + "@std/encoding@1.0.5": { + "integrity": "ecf363d4fc25bd85bd915ff6733a7e79b67e0e7806334af15f4645c569fefc04" + }, + "@std/fs@1.0.11": { + "integrity": "ba674672693340c5ebdd018b4fe1af46cb08741f42b4c538154e97d217b55bdd", + "dependencies": [ + "jsr:@std/path@^1.0.8" + ] + }, + "@std/internal@1.0.5": { + "integrity": "54a546004f769c1ac9e025abd15a76b6671ddc9687e2313b67376125650dc7ba" + }, + "@std/io@0.225.2": { + "integrity": "3c740cd4ee4c082e6cfc86458f47e2ab7cb353dc6234d5e9b1f91a2de5f4d6c7", + "dependencies": [ + "jsr:@std/bytes" + ] + }, + "@std/path@1.0.8": { + "integrity": "548fa456bb6a04d3c1a1e7477986b6cffbce95102d0bb447c67c4ee70e0364be" + }, + "@std/yaml@1.0.5": { + "integrity": "71ba3d334305ee2149391931508b2c293a8490f94a337eef3a09cade1a2a2742" + } + }, + "npm": { + "@aws-crypto/crc32@5.2.0": { + "integrity": 
"sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg==", + "dependencies": [ + "@aws-crypto/util", + "@aws-sdk/types", + "tslib" + ] + }, + "@aws-crypto/crc32c@5.2.0": { + "integrity": "sha512-+iWb8qaHLYKrNvGRbiYRHSdKRWhto5XlZUEBwDjYNf+ly5SVYG6zEoYIdxvf5R3zyeP16w4PLBn3rH1xc74Rag==", + "dependencies": [ + "@aws-crypto/util", + "@aws-sdk/types", + "tslib" + ] + }, + "@aws-crypto/sha1-browser@5.2.0": { + "integrity": "sha512-OH6lveCFfcDjX4dbAvCFSYUjJZjDr/3XJ3xHtjn3Oj5b9RjojQo8npoLeA/bNwkOkrSQ0wgrHzXk4tDRxGKJeg==", + "dependencies": [ + "@aws-crypto/supports-web-crypto", + "@aws-crypto/util", + "@aws-sdk/types", + "@aws-sdk/util-locate-window", + "@smithy/util-utf8@2.3.0", + "tslib" + ] + }, + "@aws-crypto/sha256-browser@5.2.0": { + "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", + "dependencies": [ + "@aws-crypto/sha256-js", + "@aws-crypto/supports-web-crypto", + "@aws-crypto/util", + "@aws-sdk/types", + "@aws-sdk/util-locate-window", + "@smithy/util-utf8@2.3.0", + "tslib" + ] + }, + "@aws-crypto/sha256-js@5.2.0": { + "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", + "dependencies": [ + "@aws-crypto/util", + "@aws-sdk/types", + "tslib" + ] + }, + "@aws-crypto/supports-web-crypto@5.2.0": { + "integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", + "dependencies": [ + "tslib" + ] + }, + "@aws-crypto/util@5.2.0": { + "integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/util-utf8@2.3.0", + "tslib" + ] + }, + "@aws-sdk/client-s3@3.743.0": { + "integrity": "sha512-zfHfxjr41m3vYCmaCf0P55lJhOYj9EPKoaftBMZni5OQkA98AI7jVeIAT5nisGPrp3eDXEdV00GhFlSyUHkl7A==", + "dependencies": [ + "@aws-crypto/sha1-browser", + "@aws-crypto/sha256-browser", + "@aws-crypto/sha256-js", + "@aws-sdk/core", + "@aws-sdk/credential-provider-node", + "@aws-sdk/middleware-bucket-endpoint", + "@aws-sdk/middleware-expect-continue", + "@aws-sdk/middleware-flexible-checksums", + "@aws-sdk/middleware-host-header", + "@aws-sdk/middleware-location-constraint", + "@aws-sdk/middleware-logger", + "@aws-sdk/middleware-recursion-detection", + "@aws-sdk/middleware-sdk-s3", + "@aws-sdk/middleware-ssec", + "@aws-sdk/middleware-user-agent", + "@aws-sdk/region-config-resolver", + "@aws-sdk/signature-v4-multi-region", + "@aws-sdk/types", + "@aws-sdk/util-endpoints", + "@aws-sdk/util-user-agent-browser", + "@aws-sdk/util-user-agent-node", + "@aws-sdk/xml-builder", + "@smithy/config-resolver", + "@smithy/core", + "@smithy/eventstream-serde-browser", + "@smithy/eventstream-serde-config-resolver", + "@smithy/eventstream-serde-node", + "@smithy/fetch-http-handler", + "@smithy/hash-blob-browser", + "@smithy/hash-node", + "@smithy/hash-stream-node", + "@smithy/invalid-dependency", + "@smithy/md5-js", + "@smithy/middleware-content-length", + "@smithy/middleware-endpoint", + "@smithy/middleware-retry", + "@smithy/middleware-serde", + "@smithy/middleware-stack", + "@smithy/node-config-provider", + "@smithy/node-http-handler", + "@smithy/protocol-http", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/url-parser", + "@smithy/util-base64", + "@smithy/util-body-length-browser", + "@smithy/util-body-length-node", + "@smithy/util-defaults-mode-browser", + "@smithy/util-defaults-mode-node", + "@smithy/util-endpoints", + 
"@smithy/util-middleware", + "@smithy/util-retry", + "@smithy/util-stream", + "@smithy/util-utf8@4.0.0", + "@smithy/util-waiter", + "tslib" + ] + }, + "@aws-sdk/client-sso@3.743.0": { + "integrity": "sha512-zzIukqTlBJ/GUxVhQUqBZNMfxK2m8J2AfWN/xdXh8uqj+yqGLFl7E9wftmDxzyj6oHOerv4s1SlE36BGavtHGA==", + "dependencies": [ + "@aws-crypto/sha256-browser", + "@aws-crypto/sha256-js", + "@aws-sdk/core", + "@aws-sdk/middleware-host-header", + "@aws-sdk/middleware-logger", + "@aws-sdk/middleware-recursion-detection", + "@aws-sdk/middleware-user-agent", + "@aws-sdk/region-config-resolver", + "@aws-sdk/types", + "@aws-sdk/util-endpoints", + "@aws-sdk/util-user-agent-browser", + "@aws-sdk/util-user-agent-node", + "@smithy/config-resolver", + "@smithy/core", + "@smithy/fetch-http-handler", + "@smithy/hash-node", + "@smithy/invalid-dependency", + "@smithy/middleware-content-length", + "@smithy/middleware-endpoint", + "@smithy/middleware-retry", + "@smithy/middleware-serde", + "@smithy/middleware-stack", + "@smithy/node-config-provider", + "@smithy/node-http-handler", + "@smithy/protocol-http", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/url-parser", + "@smithy/util-base64", + "@smithy/util-body-length-browser", + "@smithy/util-body-length-node", + "@smithy/util-defaults-mode-browser", + "@smithy/util-defaults-mode-node", + "@smithy/util-endpoints", + "@smithy/util-middleware", + "@smithy/util-retry", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@aws-sdk/core@3.734.0": { + "integrity": "sha512-SxnDqf3vobdm50OLyAKfqZetv6zzwnSqwIwd3jrbopxxHKqNIM/I0xcYjD6Tn+mPig+u7iRKb9q3QnEooFTlmg==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/core", + "@smithy/node-config-provider", + "@smithy/property-provider", + "@smithy/protocol-http", + "@smithy/signature-v4", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/util-middleware", + "fast-xml-parser", + "tslib" + ] + }, + "@aws-sdk/credential-provider-env@3.734.0": { + "integrity": "sha512-gtRkzYTGafnm1FPpiNO8VBmJrYMoxhDlGPYDVcijzx3DlF8dhWnowuSBCxLSi+MJMx5hvwrX2A+e/q0QAeHqmw==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/types", + "@smithy/property-provider", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/credential-provider-http@3.734.0": { + "integrity": "sha512-JFSL6xhONsq+hKM8xroIPhM5/FOhiQ1cov0lZxhzZWj6Ai3UAjucy3zyIFDr9MgP1KfCYNdvyaUq9/o+HWvEDg==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/types", + "@smithy/fetch-http-handler", + "@smithy/node-http-handler", + "@smithy/property-provider", + "@smithy/protocol-http", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/util-stream", + "tslib" + ] + }, + "@aws-sdk/credential-provider-ini@3.743.0": { + "integrity": "sha512-mzIgzX9//kmx9BW/vICnLVgwu0KF2nOUfFLYF4ybBuqw13nspMavgJiZ11NwJ4aeCempgiJNnH+d/Kj4hq49nQ==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/credential-provider-env", + "@aws-sdk/credential-provider-http", + "@aws-sdk/credential-provider-process", + "@aws-sdk/credential-provider-sso", + "@aws-sdk/credential-provider-web-identity", + "@aws-sdk/nested-clients", + "@aws-sdk/types", + "@smithy/credential-provider-imds", + "@smithy/property-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/credential-provider-node@3.743.0": { + "integrity": "sha512-Tm5HUfjWhI6nTcqBKvTYRYd4Y8xsLB3AvpCGqBYhItUDRIDIDsubgQfXnxaYrs4SRMzWSIpOvxu2YfqB8Z5BDQ==", + "dependencies": [ + "@aws-sdk/credential-provider-env", + "@aws-sdk/credential-provider-http", + "@aws-sdk/credential-provider-ini", + 
"@aws-sdk/credential-provider-process", + "@aws-sdk/credential-provider-sso", + "@aws-sdk/credential-provider-web-identity", + "@aws-sdk/types", + "@smithy/credential-provider-imds", + "@smithy/property-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/credential-provider-process@3.734.0": { + "integrity": "sha512-zvjsUo+bkYn2vjT+EtLWu3eD6me+uun+Hws1IyWej/fKFAqiBPwyeyCgU7qjkiPQSXqk1U9+/HG9IQ6Iiz+eBw==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/types", + "@smithy/property-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/credential-provider-sso@3.743.0": { + "integrity": "sha512-Qbwx1wMa2L/JPVZcDzn2vBEWoEzKBIeIu0PUN4EHhzY08KRbpe6Jd15znW6ZplpgrW9qkYc6RommJpmD8J8YpQ==", + "dependencies": [ + "@aws-sdk/client-sso", + "@aws-sdk/core", + "@aws-sdk/token-providers", + "@aws-sdk/types", + "@smithy/property-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/credential-provider-web-identity@3.743.0": { + "integrity": "sha512-FviWKReYdEnt/dvNWddtIY+WrucZWmV2JT54jwAlhYgvFp0HjI2ldsQswvCynOg2Rp18tPVih6rFo/8NeerS8A==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/nested-clients", + "@aws-sdk/types", + "@smithy/property-provider", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-bucket-endpoint@3.734.0": { + "integrity": "sha512-etC7G18aF7KdZguW27GE/wpbrNmYLVT755EsFc8kXpZj8D6AFKxc7OuveinJmiy0bYXAMspJUWsF6CrGpOw6CQ==", + "dependencies": [ + "@aws-sdk/types", + "@aws-sdk/util-arn-parser", + "@smithy/node-config-provider", + "@smithy/protocol-http", + "@smithy/types", + "@smithy/util-config-provider", + "tslib" + ] + }, + "@aws-sdk/middleware-expect-continue@3.734.0": { + "integrity": "sha512-P38/v1l6HjuB2aFUewt7ueAW5IvKkFcv5dalPtbMGRhLeyivBOHwbCyuRKgVs7z7ClTpu9EaViEGki2jEQqEsQ==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/protocol-http", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-flexible-checksums@3.735.0": { + "integrity": "sha512-Tx7lYTPwQFRe/wQEHMR6Drh/S+X0ToAEq1Ava9QyxV1riwtepzRLojpNDELFb3YQVVYbX7FEiBMCJLMkmIIY+A==", + "dependencies": [ + "@aws-crypto/crc32", + "@aws-crypto/crc32c", + "@aws-crypto/util", + "@aws-sdk/core", + "@aws-sdk/types", + "@smithy/is-array-buffer@4.0.0", + "@smithy/node-config-provider", + "@smithy/protocol-http", + "@smithy/types", + "@smithy/util-middleware", + "@smithy/util-stream", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@aws-sdk/middleware-host-header@3.734.0": { + "integrity": "sha512-LW7RRgSOHHBzWZnigNsDIzu3AiwtjeI2X66v+Wn1P1u+eXssy1+up4ZY/h+t2sU4LU36UvEf+jrZti9c6vRnFw==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/protocol-http", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-location-constraint@3.734.0": { + "integrity": "sha512-EJEIXwCQhto/cBfHdm3ZOeLxd2NlJD+X2F+ZTOxzokuhBtY0IONfC/91hOo5tWQweerojwshSMHRCKzRv1tlwg==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-logger@3.734.0": { + "integrity": "sha512-mUMFITpJUW3LcKvFok176eI5zXAUomVtahb9IQBwLzkqFYOrMJvWAvoV4yuxrJ8TlQBG8gyEnkb9SnhZvjg67w==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-recursion-detection@3.734.0": { + "integrity": "sha512-CUat2d9ITsFc2XsmeiRQO96iWpxSKYFjxvj27Hc7vo87YUHRnfMfnc8jw1EpxEwMcvBD7LsRa6vDNky6AjcrFA==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/protocol-http", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-sdk-s3@3.740.0": { + 
"integrity": "sha512-VML9TzNoQdAs5lSPQSEgZiPgMUSz2H7SltaLb9g4tHwKK5xQoTq5WcDd6V1d2aPxSN5Q2Q63aiVUBby6MdUN/Q==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/types", + "@aws-sdk/util-arn-parser", + "@smithy/core", + "@smithy/node-config-provider", + "@smithy/protocol-http", + "@smithy/signature-v4", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/util-config-provider", + "@smithy/util-middleware", + "@smithy/util-stream", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@aws-sdk/middleware-ssec@3.734.0": { + "integrity": "sha512-d4yd1RrPW/sspEXizq2NSOUivnheac6LPeLSLnaeTbBG9g1KqIqvCzP1TfXEqv2CrWfHEsWtJpX7oyjySSPvDQ==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/middleware-user-agent@3.743.0": { + "integrity": "sha512-bpAhfmsOc6PR3JTOYbJtdD4lw1hSUBCJMlOIrwVUJS4Jiuscxn8+HOSjkSNXB/BaleYOVQ+xlR62Qlnjn4o8rw==", + "dependencies": [ + "@aws-sdk/core", + "@aws-sdk/types", + "@aws-sdk/util-endpoints", + "@smithy/core", + "@smithy/protocol-http", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/nested-clients@3.743.0": { + "integrity": "sha512-nq0lLbo8OY07tUxNPzA8ShhOXu9u5Q6MkI1POEuhl8nhyyci8JR98D7cEg3YLOxRmrxvs3mlvOa3wZux0ah5UA==", + "dependencies": [ + "@aws-crypto/sha256-browser", + "@aws-crypto/sha256-js", + "@aws-sdk/core", + "@aws-sdk/middleware-host-header", + "@aws-sdk/middleware-logger", + "@aws-sdk/middleware-recursion-detection", + "@aws-sdk/middleware-user-agent", + "@aws-sdk/region-config-resolver", + "@aws-sdk/types", + "@aws-sdk/util-endpoints", + "@aws-sdk/util-user-agent-browser", + "@aws-sdk/util-user-agent-node", + "@smithy/config-resolver", + "@smithy/core", + "@smithy/fetch-http-handler", + "@smithy/hash-node", + "@smithy/invalid-dependency", + "@smithy/middleware-content-length", + "@smithy/middleware-endpoint", + "@smithy/middleware-retry", + "@smithy/middleware-serde", + "@smithy/middleware-stack", + "@smithy/node-config-provider", + "@smithy/node-http-handler", + "@smithy/protocol-http", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/url-parser", + "@smithy/util-base64", + "@smithy/util-body-length-browser", + "@smithy/util-body-length-node", + "@smithy/util-defaults-mode-browser", + "@smithy/util-defaults-mode-node", + "@smithy/util-endpoints", + "@smithy/util-middleware", + "@smithy/util-retry", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@aws-sdk/region-config-resolver@3.734.0": { + "integrity": "sha512-Lvj1kPRC5IuJBr9DyJ9T9/plkh+EfKLy+12s/mykOy1JaKHDpvj+XGy2YO6YgYVOb8JFtaqloid+5COtje4JTQ==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/node-config-provider", + "@smithy/types", + "@smithy/util-config-provider", + "@smithy/util-middleware", + "tslib" + ] + }, + "@aws-sdk/signature-v4-multi-region@3.740.0": { + "integrity": "sha512-w+psidN3i+kl51nQEV3V+fKjKUqcEbqUA1GtubruDBvBqrl5El/fU2NF3Lo53y8CfI9wCdf3V7KOEpHIqxHNng==", + "dependencies": [ + "@aws-sdk/middleware-sdk-s3", + "@aws-sdk/types", + "@smithy/protocol-http", + "@smithy/signature-v4", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/token-providers@3.743.0": { + "integrity": "sha512-t60WqBM37KnUiD5CE5BhqO6RCgGm5pnPD22UGCTB+Rlbc1L5HRXrzrXoITtqsJ40d7sPo9QxNAmzLmhVHvig5g==", + "dependencies": [ + "@aws-sdk/nested-clients", + "@aws-sdk/types", + "@smithy/property-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/types@3.734.0": { + "integrity": "sha512-o11tSPTT70nAkGV1fN9wm/hAIiLPyWX6SuGf+9JyTp7S/rC2cFWhR26MvA69nplcjNaXVzB0f+QFrLXXjOqCrg==", + "dependencies": [ + "@smithy/types", + 
"tslib" + ] + }, + "@aws-sdk/util-arn-parser@3.723.0": { + "integrity": "sha512-ZhEfvUwNliOQROcAk34WJWVYTlTa4694kSVhDSjW6lE1bMataPnIN8A0ycukEzBXmd8ZSoBcQLn6lKGl7XIJ5w==", + "dependencies": [ + "tslib" + ] + }, + "@aws-sdk/util-endpoints@3.743.0": { + "integrity": "sha512-sN1l559zrixeh5x+pttrnd0A3+r34r0tmPkJ/eaaMaAzXqsmKU/xYre9K3FNnsSS1J1k4PEfk/nHDTVUgFYjnw==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/types", + "@smithy/util-endpoints", + "tslib" + ] + }, + "@aws-sdk/util-locate-window@3.723.0": { + "integrity": "sha512-Yf2CS10BqK688DRsrKI/EO6B8ff5J86NXe4C+VCysK7UOgN0l1zOTeTukZ3H8Q9tYYX3oaF1961o8vRkFm7Nmw==", + "dependencies": [ + "tslib" + ] + }, + "@aws-sdk/util-user-agent-browser@3.734.0": { + "integrity": "sha512-xQTCus6Q9LwUuALW+S76OL0jcWtMOVu14q+GoLnWPUM7QeUw963oQcLhF7oq0CtaLLKyl4GOUfcwc773Zmwwng==", + "dependencies": [ + "@aws-sdk/types", + "@smithy/types", + "bowser", + "tslib" + ] + }, + "@aws-sdk/util-user-agent-node@3.743.0": { + "integrity": "sha512-YQ2vFYw6UIIgkRQnlU3w/ZZlTHQZL+Ln3EwjKyfsdvUkzlMYS6pRtSXRh42S7BF7TWaO1l9wBYSsF57PB3LR7A==", + "dependencies": [ + "@aws-sdk/middleware-user-agent", + "@aws-sdk/types", + "@smithy/node-config-provider", + "@smithy/types", + "tslib" + ] + }, + "@aws-sdk/xml-builder@3.734.0": { + "integrity": "sha512-Zrjxi5qwGEcUsJ0ru7fRtW74WcTS0rbLcehoFB+rN1GRi2hbLcFaYs4PwVA5diLeAJH0gszv3x4Hr/S87MfbKQ==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/abort-controller@4.0.1": { + "integrity": "sha512-fiUIYgIgRjMWznk6iLJz35K2YxSLHzLBA/RC6lBrKfQ8fHbPfvk7Pk9UvpKoHgJjI18MnbPuEju53zcVy6KF1g==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/chunked-blob-reader-native@4.0.0": { + "integrity": "sha512-R9wM2yPmfEMsUmlMlIgSzOyICs0x9uu7UTHoccMyt7BWw8shcGM8HqB355+BZCPBcySvbTYMs62EgEQkNxz2ig==", + "dependencies": [ + "@smithy/util-base64", + "tslib" + ] + }, + "@smithy/chunked-blob-reader@5.0.0": { + "integrity": "sha512-+sKqDBQqb036hh4NPaUiEkYFkTUGYzRsn3EuFhyfQfMy6oGHEUJDurLP9Ufb5dasr/XiAmPNMr6wa9afjQB+Gw==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/config-resolver@4.0.1": { + "integrity": "sha512-Igfg8lKu3dRVkTSEm98QpZUvKEOa71jDX4vKRcvJVyRc3UgN3j7vFMf0s7xLQhYmKa8kyJGQgUJDOV5V3neVlQ==", + "dependencies": [ + "@smithy/node-config-provider", + "@smithy/types", + "@smithy/util-config-provider", + "@smithy/util-middleware", + "tslib" + ] + }, + "@smithy/core@3.1.2": { + "integrity": "sha512-htwQXkbdF13uwwDevz9BEzL5ABK+1sJpVQXywwGSH973AVOvisHNfpcB8A8761G6XgHoS2kHPqc9DqHJ2gp+/Q==", + "dependencies": [ + "@smithy/middleware-serde", + "@smithy/protocol-http", + "@smithy/types", + "@smithy/util-body-length-browser", + "@smithy/util-middleware", + "@smithy/util-stream", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/credential-provider-imds@4.0.1": { + "integrity": "sha512-l/qdInaDq1Zpznpmev/+52QomsJNZ3JkTl5yrTl02V6NBgJOQ4LY0SFw/8zsMwj3tLe8vqiIuwF6nxaEwgf6mg==", + "dependencies": [ + "@smithy/node-config-provider", + "@smithy/property-provider", + "@smithy/types", + "@smithy/url-parser", + "tslib" + ] + }, + "@smithy/eventstream-codec@4.0.1": { + "integrity": "sha512-Q2bCAAR6zXNVtJgifsU16ZjKGqdw/DyecKNgIgi7dlqw04fqDu0mnq+JmGphqheypVc64CYq3azSuCpAdFk2+A==", + "dependencies": [ + "@aws-crypto/crc32", + "@smithy/types", + "@smithy/util-hex-encoding", + "tslib" + ] + }, + "@smithy/eventstream-serde-browser@4.0.1": { + "integrity": "sha512-HbIybmz5rhNg+zxKiyVAnvdM3vkzjE6ccrJ620iPL8IXcJEntd3hnBl+ktMwIy12Te/kyrSbUb8UCdnUT4QEdA==", + "dependencies": [ + "@smithy/eventstream-serde-universal", + 
"@smithy/types", + "tslib" + ] + }, + "@smithy/eventstream-serde-config-resolver@4.0.1": { + "integrity": "sha512-lSipaiq3rmHguHa3QFF4YcCM3VJOrY9oq2sow3qlhFY+nBSTF/nrO82MUQRPrxHQXA58J5G1UnU2WuJfi465BA==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/eventstream-serde-node@4.0.1": { + "integrity": "sha512-o4CoOI6oYGYJ4zXo34U8X9szDe3oGjmHgsMGiZM0j4vtNoT+h80TLnkUcrLZR3+E6HIxqW+G+9WHAVfl0GXK0Q==", + "dependencies": [ + "@smithy/eventstream-serde-universal", + "@smithy/types", + "tslib" + ] + }, + "@smithy/eventstream-serde-universal@4.0.1": { + "integrity": "sha512-Z94uZp0tGJuxds3iEAZBqGU2QiaBHP4YytLUjwZWx+oUeohCsLyUm33yp4MMBmhkuPqSbQCXq5hDet6JGUgHWA==", + "dependencies": [ + "@smithy/eventstream-codec", + "@smithy/types", + "tslib" + ] + }, + "@smithy/fetch-http-handler@5.0.1": { + "integrity": "sha512-3aS+fP28urrMW2KTjb6z9iFow6jO8n3MFfineGbndvzGZit3taZhKWtTorf+Gp5RpFDDafeHlhfsGlDCXvUnJA==", + "dependencies": [ + "@smithy/protocol-http", + "@smithy/querystring-builder", + "@smithy/types", + "@smithy/util-base64", + "tslib" + ] + }, + "@smithy/hash-blob-browser@4.0.1": { + "integrity": "sha512-rkFIrQOKZGS6i1D3gKJ8skJ0RlXqDvb1IyAphksaFOMzkn3v3I1eJ8m7OkLj0jf1McP63rcCEoLlkAn/HjcTRw==", + "dependencies": [ + "@smithy/chunked-blob-reader", + "@smithy/chunked-blob-reader-native", + "@smithy/types", + "tslib" + ] + }, + "@smithy/hash-node@4.0.1": { + "integrity": "sha512-TJ6oZS+3r2Xu4emVse1YPB3Dq3d8RkZDKcPr71Nj/lJsdAP1c7oFzYqEn1IBc915TsgLl2xIJNuxCz+gLbLE0w==", + "dependencies": [ + "@smithy/types", + "@smithy/util-buffer-from@4.0.0", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/hash-stream-node@4.0.1": { + "integrity": "sha512-U1rAE1fxmReCIr6D2o/4ROqAQX+GffZpyMt3d7njtGDr2pUNmAKRWa49gsNVhCh2vVAuf3wXzWwNr2YN8PAXIw==", + "dependencies": [ + "@smithy/types", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/invalid-dependency@4.0.1": { + "integrity": "sha512-gdudFPf4QRQ5pzj7HEnu6FhKRi61BfH/Gk5Yf6O0KiSbr1LlVhgjThcvjdu658VE6Nve8vaIWB8/fodmS1rBPQ==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/is-array-buffer@2.2.0": { + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/is-array-buffer@4.0.0": { + "integrity": "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/md5-js@4.0.1": { + "integrity": "sha512-HLZ647L27APi6zXkZlzSFZIjpo8po45YiyjMGJZM3gyDY8n7dPGdmxIIljLm4gPt/7rRvutLTTkYJpZVfG5r+A==", + "dependencies": [ + "@smithy/types", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/middleware-content-length@4.0.1": { + "integrity": "sha512-OGXo7w5EkB5pPiac7KNzVtfCW2vKBTZNuCctn++TTSOMpe6RZO/n6WEC1AxJINn3+vWLKW49uad3lo/u0WJ9oQ==", + "dependencies": [ + "@smithy/protocol-http", + "@smithy/types", + "tslib" + ] + }, + "@smithy/middleware-endpoint@4.0.3": { + "integrity": "sha512-YdbmWhQF5kIxZjWqPIgboVfi8i5XgiYMM7GGKFMTvBei4XjNQfNv8sukT50ITvgnWKKKpOtp0C0h7qixLgb77Q==", + "dependencies": [ + "@smithy/core", + "@smithy/middleware-serde", + "@smithy/node-config-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "@smithy/url-parser", + "@smithy/util-middleware", + "tslib" + ] + }, + "@smithy/middleware-retry@4.0.4": { + "integrity": "sha512-wmxyUBGHaYUqul0wZiset4M39SMtDBOtUr2KpDuftKNN74Do9Y36Go6Eqzj9tL0mIPpr31ulB5UUtxcsCeGXsQ==", + "dependencies": [ + "@smithy/node-config-provider", + "@smithy/protocol-http", + 
"@smithy/service-error-classification", + "@smithy/smithy-client", + "@smithy/types", + "@smithy/util-middleware", + "@smithy/util-retry", + "tslib", + "uuid" + ] + }, + "@smithy/middleware-serde@4.0.2": { + "integrity": "sha512-Sdr5lOagCn5tt+zKsaW+U2/iwr6bI9p08wOkCp6/eL6iMbgdtc2R5Ety66rf87PeohR0ExI84Txz9GYv5ou3iQ==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/middleware-stack@4.0.1": { + "integrity": "sha512-dHwDmrtR/ln8UTHpaIavRSzeIk5+YZTBtLnKwDW3G2t6nAupCiQUvNzNoHBpik63fwUaJPtlnMzXbQrNFWssIA==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/node-config-provider@4.0.1": { + "integrity": "sha512-8mRTjvCtVET8+rxvmzRNRR0hH2JjV0DFOmwXPrISmTIJEfnCBugpYYGAsCj8t41qd+RB5gbheSQ/6aKZCQvFLQ==", + "dependencies": [ + "@smithy/property-provider", + "@smithy/shared-ini-file-loader", + "@smithy/types", + "tslib" + ] + }, + "@smithy/node-http-handler@4.0.2": { + "integrity": "sha512-X66H9aah9hisLLSnGuzRYba6vckuFtGE+a5DcHLliI/YlqKrGoxhisD5XbX44KyoeRzoNlGr94eTsMVHFAzPOw==", + "dependencies": [ + "@smithy/abort-controller", + "@smithy/protocol-http", + "@smithy/querystring-builder", + "@smithy/types", + "tslib" + ] + }, + "@smithy/property-provider@4.0.1": { + "integrity": "sha512-o+VRiwC2cgmk/WFV0jaETGOtX16VNPp2bSQEzu0whbReqE1BMqsP2ami2Vi3cbGVdKu1kq9gQkDAGKbt0WOHAQ==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/protocol-http@5.0.1": { + "integrity": "sha512-TE4cpj49jJNB/oHyh/cRVEgNZaoPaxd4vteJNB0yGidOCVR0jCw/hjPVsT8Q8FRmj8Bd3bFZt8Dh7xGCT+xMBQ==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/querystring-builder@4.0.1": { + "integrity": "sha512-wU87iWZoCbcqrwszsOewEIuq+SU2mSoBE2CcsLwE0I19m0B2gOJr1MVjxWcDQYOzHbR1xCk7AcOBbGFUYOKvdg==", + "dependencies": [ + "@smithy/types", + "@smithy/util-uri-escape", + "tslib" + ] + }, + "@smithy/querystring-parser@4.0.1": { + "integrity": "sha512-Ma2XC7VS9aV77+clSFylVUnPZRindhB7BbmYiNOdr+CHt/kZNJoPP0cd3QxCnCFyPXC4eybmyE98phEHkqZ5Jw==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/service-error-classification@4.0.1": { + "integrity": "sha512-3JNjBfOWpj/mYfjXJHB4Txc/7E4LVq32bwzE7m28GN79+M1f76XHflUaSUkhOriprPDzev9cX/M+dEB80DNDKA==", + "dependencies": [ + "@smithy/types" + ] + }, + "@smithy/shared-ini-file-loader@4.0.1": { + "integrity": "sha512-hC8F6qTBbuHRI/uqDgqqi6J0R4GtEZcgrZPhFQnMhfJs3MnUTGSnR1NSJCJs5VWlMydu0kJz15M640fJlRsIOw==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/signature-v4@5.0.1": { + "integrity": "sha512-nCe6fQ+ppm1bQuw5iKoeJ0MJfz2os7Ic3GBjOkLOPtavbD1ONoyE3ygjBfz2ythFWm4YnRm6OxW+8p/m9uCoIA==", + "dependencies": [ + "@smithy/is-array-buffer@4.0.0", + "@smithy/protocol-http", + "@smithy/types", + "@smithy/util-hex-encoding", + "@smithy/util-middleware", + "@smithy/util-uri-escape", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/smithy-client@4.1.3": { + "integrity": "sha512-A2Hz85pu8BJJaYFdX8yb1yocqigyqBzn+OVaVgm+Kwi/DkN8vhN2kbDVEfADo6jXf5hPKquMLGA3UINA64UZ7A==", + "dependencies": [ + "@smithy/core", + "@smithy/middleware-endpoint", + "@smithy/middleware-stack", + "@smithy/protocol-http", + "@smithy/types", + "@smithy/util-stream", + "tslib" + ] + }, + "@smithy/types@4.1.0": { + "integrity": "sha512-enhjdwp4D7CXmwLtD6zbcDMbo6/T6WtuuKCY49Xxc6OMOmUWlBEBDREsxxgV2LIdeQPW756+f97GzcgAwp3iLw==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/url-parser@4.0.1": { + "integrity": "sha512-gPXcIEUtw7VlK8f/QcruNXm7q+T5hhvGu9tl63LsJPZ27exB6dtNwvh2HIi0v7JcXJ5emBxB+CJxwaLEdJfA+g==", + "dependencies": [ + 
"@smithy/querystring-parser", + "@smithy/types", + "tslib" + ] + }, + "@smithy/util-base64@4.0.0": { + "integrity": "sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", + "dependencies": [ + "@smithy/util-buffer-from@4.0.0", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/util-body-length-browser@4.0.0": { + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/util-body-length-node@4.0.0": { + "integrity": "sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/util-buffer-from@2.2.0": { + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "dependencies": [ + "@smithy/is-array-buffer@2.2.0", + "tslib" + ] + }, + "@smithy/util-buffer-from@4.0.0": { + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", + "dependencies": [ + "@smithy/is-array-buffer@4.0.0", + "tslib" + ] + }, + "@smithy/util-config-provider@4.0.0": { + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/util-defaults-mode-browser@4.0.4": { + "integrity": "sha512-Ej1bV5sbrIfH++KnWxjjzFNq9nyP3RIUq2c9Iqq7SmMO/idUR24sqvKH2LUQFTSPy/K7G4sB2m8n7YYlEAfZaw==", + "dependencies": [ + "@smithy/property-provider", + "@smithy/smithy-client", + "@smithy/types", + "bowser", + "tslib" + ] + }, + "@smithy/util-defaults-mode-node@4.0.4": { + "integrity": "sha512-HE1I7gxa6yP7ZgXPCFfZSDmVmMtY7SHqzFF55gM/GPegzZKaQWZZ+nYn9C2Cc3JltCMyWe63VPR3tSFDEvuGjw==", + "dependencies": [ + "@smithy/config-resolver", + "@smithy/credential-provider-imds", + "@smithy/node-config-provider", + "@smithy/property-provider", + "@smithy/smithy-client", + "@smithy/types", + "tslib" + ] + }, + "@smithy/util-endpoints@3.0.1": { + "integrity": "sha512-zVdUENQpdtn9jbpD9SCFK4+aSiavRb9BxEtw9ZGUR1TYo6bBHbIoi7VkrFQ0/RwZlzx0wRBaRmPclj8iAoJCLA==", + "dependencies": [ + "@smithy/node-config-provider", + "@smithy/types", + "tslib" + ] + }, + "@smithy/util-hex-encoding@4.0.0": { + "integrity": "sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/util-middleware@4.0.1": { + "integrity": "sha512-HiLAvlcqhbzhuiOa0Lyct5IIlyIz0PQO5dnMlmQ/ubYM46dPInB+3yQGkfxsk6Q24Y0n3/JmcA1v5iEhmOF5mA==", + "dependencies": [ + "@smithy/types", + "tslib" + ] + }, + "@smithy/util-retry@4.0.1": { + "integrity": "sha512-WmRHqNVwn3kI3rKk1LsKcVgPBG6iLTBGC1iYOV3GQegwJ3E8yjzHytPt26VNzOWr1qu0xE03nK0Ug8S7T7oufw==", + "dependencies": [ + "@smithy/service-error-classification", + "@smithy/types", + "tslib" + ] + }, + "@smithy/util-stream@4.0.2": { + "integrity": "sha512-0eZ4G5fRzIoewtHtwaYyl8g2C+osYOT4KClXgfdNEDAgkbe2TYPqcnw4GAWabqkZCax2ihRGPe9LZnsPdIUIHA==", + "dependencies": [ + "@smithy/fetch-http-handler", + "@smithy/node-http-handler", + "@smithy/types", + "@smithy/util-base64", + "@smithy/util-buffer-from@4.0.0", + "@smithy/util-hex-encoding", + "@smithy/util-utf8@4.0.0", + "tslib" + ] + }, + "@smithy/util-uri-escape@4.0.0": { + "integrity": "sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", + "dependencies": [ + "tslib" + ] + }, + "@smithy/util-utf8@2.3.0": { + "integrity": 
"sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "dependencies": [ + "@smithy/util-buffer-from@2.2.0", + "tslib" + ] + }, + "@smithy/util-utf8@4.0.0": { + "integrity": "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", + "dependencies": [ + "@smithy/util-buffer-from@4.0.0", + "tslib" + ] + }, + "@smithy/util-waiter@4.0.2": { + "integrity": "sha512-piUTHyp2Axx3p/kc2CIJkYSv0BAaheBQmbACZgQSSfWUumWNW+R1lL+H9PDBxKJkvOeEX+hKYEFiwO8xagL8AQ==", + "dependencies": [ + "@smithy/abort-controller", + "@smithy/types", + "tslib" + ] + }, + "@types/node@22.5.4": { + "integrity": "sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg==", + "dependencies": [ + "undici-types" + ] + }, + "bowser@2.11.0": { + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==" + }, + "fast-xml-parser@4.4.1": { + "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "dependencies": [ + "strnum" + ] + }, + "strnum@1.0.5": { + "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==" + }, + "tslib@2.8.1": { + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==" + }, + "undici-types@6.19.8": { + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==" + }, + "uuid@9.0.1": { + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==" + } + }, + "remote": { + "https://deno.land/x/is_what@v4.1.15/src/index.ts": "e55b975d532b71a0e32501ada85ae3c67993b75dc1047c6d1a2e00368b789af0", + "https://deno.land/x/libpkgx@v0.20.3/src/deps.ts": "f725c174d63a5c613387afa229046aafaedfd9eb59b07acb7ab8f6d503db13c9", + "https://deno.land/x/libpkgx@v0.20.3/src/types.ts": "dc1a4e6458d11454282f832909838c56f786a26eed54fb8ab5675d6691ebf534", + "https://deno.land/x/libpkgx@v0.20.3/src/utils/Path.ts": "45303993a377363277e6c201160f36f1f9a5997632db03f473b618968d568e58", + "https://deno.land/x/libpkgx@v0.20.3/src/utils/host.ts": "3b9e0d4cb05f9bde0ee8bcb0f8557b0a339f6ef56dfb1f08b2cfa63b44db91ee", + "https://deno.land/x/libpkgx@v0.20.3/src/utils/pkg.ts": "e737cc9a98cd6a2797668c6ef856128692290256a521cc3906bd538410925451", + "https://deno.land/x/libpkgx@v0.20.3/src/utils/read-lines.ts": "6d947ccd5f8e48701ed9c5402b6ac5144df3fce60d666f19b6506edbc36c8367", + "https://deno.land/x/libpkgx@v0.20.3/src/utils/semver.ts": "da22a0e0cf74de792cc4d44c01ec5b767463816c8abb4b5fb86583ccda573298", + "https://deno.land/x/outdent@v0.8.0/mod.ts": "72630e680dcc36d5ae556fbff6900b12706c81a6fd592345fc98bcc0878fb3ca", + "https://deno.land/x/outdent@v0.8.0/src/index.ts": "6dc3df4108d5d6fedcdb974844d321037ca81eaaa16be6073235ff3268841a22", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/mod.ts": "14a69905ffad8064444c02d146008efeb6a0ddf0fe543483839af18e01684f5a", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/deps.ts": "f725c174d63a5c613387afa229046aafaedfd9eb59b07acb7ab8f6d503db13c9", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/hooks/useCache.ts": "9f3cc576fabae2caa6aedbf00ab12a59c732be1315471e5a475fef496c1e35ae", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/hooks/useCellar.ts": 
"c1e264fcb732423734f8c113fc7cb80c97befe8f13ed9d24906328bc5526c72d", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/hooks/useConfig.ts": "d5a02ee7a191fb4a2c3cd1721690ab6cf0338c9680847a9e9c4a6c9ea94df025", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/hooks/useDownload.ts": "3f9133486008146809508783b977e3480d0a43238ace27f78565fb9679aa9906", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/hooks/useFetch.ts": "ecf29342210b8eceed216e3bb73fcc7ea5b3ea5059686cf344ed190ca42ff682", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/hooks/useInventory.ts": "f459d819ab676a7e3786522d856b7670e994e4a755b0d1609b53c8b4ebe0c959", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/hooks/useMoustaches.ts": "e9166ddace759315782be0f570a4cd63c78e3b85592d59b75ddd33a0e401aa6b", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/hooks/useOffLicense.ts": "1c41ef6882512b67a47fcd1d1c0ce459906d6981a59f6be86d982594a7c26058", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/hooks/usePantry.ts": "113f3ac7cb6565425eebc7f1bd1ee52217f074865b46b452db79cc72d82e4d4a", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/hooks/useShellEnv.ts": "ae2388d3f15d2e03435df23a8392ace21d3d4f0c83b2575a9670ab7badc389c3", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/hooks/useSync.ts": "ea605a0eaa43ab9988d36dd6150e16dd911c4be45b7b0f2add6b236636bd517c", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/hooks/useSyncCache.ts": "30891e9d923f2c2b28f1ba220923221195b8261a4aeea18ef2676d93bd5da10d", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/plumbing/hydrate.ts": "c75f151ed307532ce9c2bf62c61e6478bb1132f95a11b848e02ea2dec08c2ff3", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/plumbing/install.ts": "2a4e19fae70fef7ba0be454fd5b7efed4d7d19a5141d26d3b26124ab792007ed", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/plumbing/link.ts": "0ed6198de737ebeab1704d375c732c9264fb0cfa7f2aedddb90f51d100174a73", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/plumbing/resolve.ts": "9425e0d201ee440a8dc011940046f0bb6d94aa29cd738e1a8c39ca86e55aad41", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/plumbing/which.ts": "f918211e561e56aabf6909e06fa10fa3be06ffebd9e7cc28ce57efef4faff27d", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/porcelain/install.ts": "85caffe3842ab63bf6d59c6c5c9fb93fbc95a0d5652488d93b95d865722b67b9", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/porcelain/run.ts": "55cc9124dca732e2f5557a8c451daebecb109c86b2f4347fa1e433aedf35ab5a", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/types.ts": "dc1a4e6458d11454282f832909838c56f786a26eed54fb8ab5675d6691ebf534", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/utils/Path.ts": "45303993a377363277e6c201160f36f1f9a5997632db03f473b618968d568e58", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/utils/error.ts": "b0d3130f5cdfc0cc8ea10f93fea0e7e97d4473ddc9bc527156b0fcf24c7b939c", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/utils/flock.ts": "5fd77f6b53c3a90888cf20a7726e9047aad2c766e4ec2fbf7cf2f916b98d99a4", + 
"https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/utils/host.ts": "3b9e0d4cb05f9bde0ee8bcb0f8557b0a339f6ef56dfb1f08b2cfa63b44db91ee", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/utils/misc.ts": "a4d7944da07066e5dd2ef289af436dc7f1032aed4272811e9b19ceeed60b8491", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/utils/pkg.ts": "e737cc9a98cd6a2797668c6ef856128692290256a521cc3906bd538410925451", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/utils/read-lines.ts": "6d947ccd5f8e48701ed9c5402b6ac5144df3fce60d666f19b6506edbc36c8367", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/src/utils/semver.ts": "da22a0e0cf74de792cc4d44c01ec5b767463816c8abb4b5fb86583ccda573298", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/vendor/sqlite3@0.10.0/mod.ts": "7ce0a19f9cea3475cc94750ece61c20d857f1c3a279ad38cd029a3f8d9b7b03e", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/vendor/sqlite3@0.10.0/src/constants.ts": "85fd27aa6e199093f25f5f437052e16fd0e0870b96ca9b24a98e04ddc8b7d006", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/vendor/sqlite3@0.10.0/src/database.ts": "49569b0f279cfc3e42730002ae789a2694da74deb212e63a4b4e6640dc4d70ba", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/vendor/sqlite3@0.10.0/src/ffi.ts": "ddffcee178b3e72c45be385efd8b4434f7196cafe45a0046ae68df9af307c7f3", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/vendor/sqlite3@0.10.0/src/statement.ts": "2be7ffebbb72a031899dbf189972c5596aa73eabfc8a382a1bac9c5c111b0026", + "https://raw.githubusercontent.com/pkgxdev/libpkgx/refs/tags/v0.20.3/vendor/sqlite3@0.10.0/src/util.ts": "19815a492dd8f4c684587238dc20066de11782137de549cd4c9709d1b548247e" + } +} diff --git a/projects/amazon.com/aws/build.ts b/projects/amazon.com/aws/build.ts new file mode 100644 index 00000000..ec220ec3 --- /dev/null +++ b/projects/amazon.com/aws/build.ts @@ -0,0 +1,67 @@ +import { BuildOptions, env_include, Path, run, unarchive, undent } from "brewkit"; +import { walk } from "jsr:@std/fs@1/walk"; + +export default async function ({ prefix, version }: BuildOptions) { + await unarchive(`https://github.com/aws/aws-cli/archive/${version}.tar.gz`); + + let symlink_python = ""; + if (Deno.build.os != "darwin") { + env_include("llvm.org python~3.9"); + symlink_python = 'ln -sf "$(pkgx -q +python~3.9 -- which python)" "$d/python"'; + } else { + // if Xcode or XCLT are installed, use that python + symlink_python = undent` + xc_python="$(xcode-select -p)" + if [ "$xc_python" -a -f "$xc_python/usr/bin/python3" ]; then + ln -sf "$xc_python/usr/bin/python3" "$d/python" + else + ln -sf "$(pkgx -q +python~3.9 -- which python)" "$d/python" + fi`; + } + + run`python3 -m venv ${prefix}`; + run`${prefix}/bin/pip install --no-cache-dir .`; + + const content = Deno.readTextFileSync(`${prefix}/bin/aws`) + .split("\n").slice(1) // drop shebang + .join("\n"); + + // this trickery is necessary to have python use the venv + prefix.bin.rm("rf").mkdir().join("aws").write(undent` + #!/bin/sh + """:" + d="$(cd "$(dirname "$0")" && pwd)" + ${symlink_python} + exec "$d/python" "$0" "$@" + ":""" + + ${content}`).chmod(0o755); + + // reduce size as much as poss + const rms = []; + for await (const entry of walk(prefix.join("lib").string)) { + if (entry.isDirectory && entry.name.endsWith(".dist-info")) { + rms.push(entry.path); + } + if (entry.isFile && 
entry.name.endsWith(".h")) { + Deno.removeSync(entry.path); + } + if (entry.isDirectory && entry.name == "tests") { + rms.push(entry.path); + } + //FIXME the empty dir pruner should handle this but doesn’t + if (entry.isDirectory && entry.name == "__pycache__") { + rms.push(entry.path); + } + } + + // must do last or walk() throws + for (const rm of rms) { + new Path(rm).rm("rf"); + } + + prefix.join("lib/python3.9/site-packages/setuptools").rm("rf"); + prefix.join("lib/python3.9/site-packages/_distutils_hack").rm("rf"); + prefix.join("lib/python3.9/site-packages/pip").rm("rf"); + prefix.join("lib/python3.9/site-packages/pkg_resources").rm("rf"); +} diff --git a/projects/amazon.com/aws/package.yml b/projects/amazon.com/aws/package.yml new file mode 100644 index 00000000..038e0745 --- /dev/null +++ b/projects/amazon.com/aws/package.yml @@ -0,0 +1,12 @@ +name: + AWS CLI + +homepage: + https://aws.amazon.com/cli + +programs: + - bin/aws + +platforms: + - darwin + - linux diff --git a/projects/amazon.com/aws/sandbox.sb.in b/projects/amazon.com/aws/sandbox.sb.in new file mode 100644 index 00000000..41b0f8c7 --- /dev/null +++ b/projects/amazon.com/aws/sandbox.sb.in @@ -0,0 +1 @@ +(allow file-write* (subpath "{{prefix}}/bin/python")) diff --git a/projects/amazon.com/aws/test.ts b/projects/amazon.com/aws/test.ts new file mode 100644 index 00000000..d33f573e --- /dev/null +++ b/projects/amazon.com/aws/test.ts @@ -0,0 +1,5 @@ +import { run } from "brewkit"; + +export default function () { + run`aws --version`; +} diff --git a/projects/amazon.com/aws/versions.ts b/projects/amazon.com/aws/versions.ts new file mode 100644 index 00000000..4dbe5695 --- /dev/null +++ b/projects/amazon.com/aws/versions.ts @@ -0,0 +1,13 @@ +import { github, Range, semver } from "brewkit"; + +export default async function (constraint: Range) { + const rv = []; + const tags = await github.tags("aws/aws-cli"); + for (const { name: tag } of tags) { + const version = semver.parse(tag); + if (version) { + rv.push({ tag, version }); + } + } + return rv; +} diff --git a/projects/cmake.org/build.ts b/projects/cmake.org/build.ts new file mode 100644 index 00000000..7d0302d5 --- /dev/null +++ b/projects/cmake.org/build.ts @@ -0,0 +1,35 @@ +import { BuildOptions, run, unarchive } from "brewkit"; + +//TODO has more deps that it vendors +// use --sysmem-libs to use them + +export default async function ({ prefix, deps, version }: BuildOptions) { + await unarchive(`https://github.com/Kitware/CMake/releases/download/v${version}/cmake-${version}.tar.gz`); + + const extra_args = Deno.build.os == "linux" + ? 
`-DZLIB_LIBRARY=${deps["zlib.net"].prefix}/lib/libz.so + -DZLIB_INCLUDE_DIR=${deps["zlib.net"].prefix}/include + -DCURL_LIBRARY=${deps["curl.se"].prefix}/lib/libcurl.so + -DCURL_INCLUDE_DIR=${deps["curl.se"].prefix}/include + -DBZIP2_LIBRARIES=${deps["sourceware.org/bzip2"].prefix}/lib/libbz2.so + -DBZIP2_INCLUDE_DIR=${deps["sourceware.org/bzip2"].prefix}/include` + : ""; + + run`./bootstrap + --prefix=${prefix} + --docdir=/share/doc + --mandir=/share/man + --system-zlib + --system-curl + --system-bzip2 + -- + -DCMake_BUILD_LTO=ON + -DCMAKE_BUILD_TYPE=Release + -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON + ${extra_args} + `; + run`make --jobs ${navigator.hardwareConcurrency} install`; + + // we have a “docs are on the Internet” policy + prefix.share.join(`cmake-${version.marketing}/Help`).rm("rf"); +} diff --git a/projects/cmake.org/package.yml b/projects/cmake.org/package.yml new file mode 100644 index 00000000..36610e28 --- /dev/null +++ b/projects/cmake.org/package.yml @@ -0,0 +1,21 @@ +repository: + https://github.com/Kitware/CMake + +programs: + - bin/cmake + - bin/cpack + - bin/ctest + +darwin: + programs: + - bin/ccmake + +linux: + dependencies: + curl.se: ">=5" + zlib.net: ^1 + sourceware.org/bzip2: ^1 + +platforms: + - linux + - darwin diff --git a/projects/cmake.org/test.ts b/projects/cmake.org/test.ts new file mode 100644 index 00000000..e7d495d5 --- /dev/null +++ b/projects/cmake.org/test.ts @@ -0,0 +1,5 @@ +import { run } from "brewkit"; + +export default async function () { + run`cmake --version`; +} diff --git a/projects/cmake.org/versions.ts b/projects/cmake.org/versions.ts new file mode 100644 index 00000000..46e465e8 --- /dev/null +++ b/projects/cmake.org/versions.ts @@ -0,0 +1,7 @@ +import { github, Range } from "brewkit"; + +export default async function (constraint: Range) { + return (await github.releases("Kitware/CMake", constraint)).map( + github.std_version_covert, + ); +} diff --git a/projects/curl.se/build.ts b/projects/curl.se/build.ts new file mode 100644 index 00000000..b961a484 --- /dev/null +++ b/projects/curl.se/build.ts @@ -0,0 +1,32 @@ +import { BuildOptions, inreplace, run, unarchive } from "brewkit"; + +export default async function ({ prefix, version }: BuildOptions) { + await unarchive(`https://curl.se/download/curl-${version}.tar.bz2`); + + const args = [ + `--prefix=${prefix}`, + "--without-libpsl", //causes weird downstream bugs in php + ]; + + switch (Deno.build.os) { + case "linux": + args.push( + "--with-openssl", + "--with-ca-fallback", //uses openssl’s certs (which we control) + ); + break; + case "darwin": + args.push( + "--with-secure-transport", // use macOS’s native TLS + ); + } + + run`./configure ${args.join(" ")}`; + run`make --jobs ${navigator.hardwareConcurrency} install`; + + inreplace( + prefix.join("bin/curl-config"), + /^prefix='.+'$/gm, + 'prefix="$(cd "$(dirname "$0")"/.. && pwd)"', + ); +} diff --git a/projects/curl.se/ca-certs/README.md b/projects/curl.se/ca-certs/README.md new file mode 100644 index 00000000..75cea0f9 --- /dev/null +++ b/projects/curl.se/ca-certs/README.md @@ -0,0 +1,12 @@ +# `curl.se/ca-certs` + +CA Certificates are required to securely connect via HTTPS (among other +things). + +The cURL project assembles these CA Certificates via the Mozilla CA +certificate store into PEM format. + +Apple and Microsoft manage their own CA stores, so on those platforms we +configure packages to use them. + +On *nix we use this. 
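A minimal sketch of how a consumer picks the bundle up on *nix (illustrative commands, not part of this change): the `package.yml` below exports `SSL_CERT_FILE` pointing at the installed PEM file, OpenSSL's default verify paths honor that variable, and tools can also be pointed at the file explicitly:

```sh
# assumes curl.se/ca-certs is active, so SSL_CERT_FILE points at its share/ca-certs.pem
openssl s_client -connect example.com:443 -CAfile "$SSL_CERT_FILE" </dev/null
curl --cacert "$SSL_CERT_FILE" https://example.com
```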
diff --git a/projects/curl.se/ca-certs/build.ts b/projects/curl.se/ca-certs/build.ts new file mode 100644 index 00000000..f18acc39 --- /dev/null +++ b/projects/curl.se/ca-certs/build.ts @@ -0,0 +1,11 @@ +import { BuildOptions } from "brewkit"; + +export default async function build({ prefix, tag }: BuildOptions) { + const rsp = await fetch(`https://curl.se/ca/${tag}.pem`); + const dir = prefix.join("share").mkdir("p"); + using out = await Deno.open(dir.join("ca-certs.pem").string, { + create: true, + write: true, + }); + await rsp.body!.pipeTo(out.writable); +} diff --git a/projects/curl.se/ca-certs/package.yml b/projects/curl.se/ca-certs/package.yml new file mode 100644 index 00000000..4188fec1 --- /dev/null +++ b/projects/curl.se/ca-certs/package.yml @@ -0,0 +1,16 @@ +name: + CA Certficates + +homepage: + https://curl.se/docs/caextract.html + +brief: + Secure connection certificates + +description: + Certificates for authenticating and verifying secure TLS/SSL connections + +env: + SSL_CERT_FILE: "{{prefix}}/share/ca-certs.pem" + +platforms: "*" diff --git a/projects/curl.se/ca-certs/test.ts b/projects/curl.se/ca-certs/test.ts new file mode 100644 index 00000000..64032e89 --- /dev/null +++ b/projects/curl.se/ca-certs/test.ts @@ -0,0 +1,5 @@ +import { Path, run, TestOptions } from "brewkit"; + +export default async function ({ prefix }: TestOptions) { + run`pkgx openssl x509 -noout -text -in ${prefix.join("share/ca-certs.pem")}`; +} diff --git a/projects/curl.se/ca-certs/versions.ts b/projects/curl.se/ca-certs/versions.ts new file mode 100644 index 00000000..292fa9b9 --- /dev/null +++ b/projects/curl.se/ca-certs/versions.ts @@ -0,0 +1,20 @@ +import { SemVer } from "brewkit"; + +export default async function () { + const rsp = await fetch("https://curl.se/docs/caextract.html"); + const txt = await rsp.text(); + const rx = /cacert-(2\d+)-(\d+)-(\d+).pem/g; + + const rv = []; + for (const match of txt.matchAll(rx)) { + const [tag, major, minor, patch] = match; + rv.push({ + version: new SemVer( + `${parseInt(major)}.${parseInt(minor)}.${parseInt(patch)}`, + ), + tag: tag.replace(/.pem$/, ""), + }); + } + + return rv; +} diff --git a/projects/curl.se/package.yml b/projects/curl.se/package.yml new file mode 100644 index 00000000..c6db9739 --- /dev/null +++ b/projects/curl.se/package.yml @@ -0,0 +1,18 @@ +repository: + https://github.com/curl/curl + +name: + cURL + +linux: + dependencies: + openssl.org: ^3 + zlib.net: ^1.2.11 + +programs: + - bin/curl + - bin/curl-config + +platforms: + - linux + - darwin diff --git a/projects/curl.se/test.ts b/projects/curl.se/test.ts new file mode 100644 index 00000000..05df3c66 --- /dev/null +++ b/projects/curl.se/test.ts @@ -0,0 +1,9 @@ +import { backticks, run, TestOptions } from "brewkit"; +import { assertEquals } from "jsr:@std/assert@^1/equals"; + +export default async function ({ prefix }: TestOptions) { + run`curl -i pkgx.sh`; + run`curl --proto =https --tlsv1.2 -sSf https://get-ghcup.haskell.org`; + + assertEquals(await backticks`curl-config --prefix`, prefix.string); +} diff --git a/projects/curl.se/versions.ts b/projects/curl.se/versions.ts new file mode 100644 index 00000000..0ecad003 --- /dev/null +++ b/projects/curl.se/versions.ts @@ -0,0 +1,11 @@ +import { github, Range, SemVer } from "brewkit"; + +export default async function (constraint: Range) { + return (await github.releases("curl/curl", constraint)).map( + ({ tag_name: tag }) => { + const s = tag.replace(/^curl-/, "").replaceAll("_", "."); + const version = new SemVer(s); + return { 
version, tag }; + }, + ); +} diff --git a/projects/deno.com/build.ts b/projects/deno.com/build.ts new file mode 100644 index 00000000..4999d34d --- /dev/null +++ b/projects/deno.com/build.ts @@ -0,0 +1,6 @@ +import { BuildOptions, unarchive } from "brewkit"; + +export default async function ({ prefix, tag }: BuildOptions) { + await unarchive(`https://github.com/denoland/deno/releases/download/${tag}/deno-${Deno.build.target}.zip`); + prefix.bin.install("deno"); +} diff --git a/projects/deno.com/package.yml b/projects/deno.com/package.yml new file mode 100644 index 00000000..5d821718 --- /dev/null +++ b/projects/deno.com/package.yml @@ -0,0 +1,17 @@ +name: + deno + +repository: + https://github.com/denoland/deno + +programs: + - bin/deno + +linux: + companions: + - info-zip.org/unzip + +platforms: + - darwin + - linux + - windows diff --git a/projects/deno.com/test.ts b/projects/deno.com/test.ts new file mode 100644 index 00000000..047e56c3 --- /dev/null +++ b/projects/deno.com/test.ts @@ -0,0 +1,6 @@ +import { assertMatch } from "jsr:@std/assert@^1/match"; +import { backticks, TestOptions } from "brewkit"; + +export default async function ({ version }: TestOptions) { + assertMatch(await backticks`deno --version`, new RegExp(`^deno ${version}`)); +} diff --git a/projects/freedesktop.org/pkg-config/build.ts b/projects/freedesktop.org/pkg-config/build.ts new file mode 100644 index 00000000..f4361703 --- /dev/null +++ b/projects/freedesktop.org/pkg-config/build.ts @@ -0,0 +1,15 @@ +import { BuildOptions, run, unarchive } from "brewkit"; + +export default async function ({ prefix, tag }: BuildOptions) { + await unarchive(`https://pkgconfig.freedesktop.org/releases/${tag}.tar.gz`); + + Deno.env.set("CFLAGS", "-Wno-error=int-conversion"); + + run`./configure + --prefix=${prefix} + --disable-debug + --disable-host-tool + --with-internal-glib + --with-pc-path=/usr/lib/pkgconfig:/usr/share/pkgconfig:/usr/local/lib/pkgconfig`; + run`make --jobs ${navigator.hardwareConcurrency} install`; +} diff --git a/projects/freedesktop.org/pkg-config/package.yml b/projects/freedesktop.org/pkg-config/package.yml new file mode 100644 index 00000000..59914ebb --- /dev/null +++ b/projects/freedesktop.org/pkg-config/package.yml @@ -0,0 +1,15 @@ +name: + pkg-config + +repository: + https://gitlab.freedesktop.org/pkg-config/pkg-config + +homepage: + https://www.freedesktop.org/wiki/Software/pkg-config/ + +programs: + - bin/pkg-config + +platforms: + - darwin + - linux diff --git a/projects/freedesktop.org/pkg-config/test.ts b/projects/freedesktop.org/pkg-config/test.ts new file mode 100644 index 00000000..fc1ba5bf --- /dev/null +++ b/projects/freedesktop.org/pkg-config/test.ts @@ -0,0 +1,5 @@ +import { run } from "brewkit"; + +export default function () { + run`pkg-config --version`; +} diff --git a/projects/freedesktop.org/pkg-config/versions.ts b/projects/freedesktop.org/pkg-config/versions.ts new file mode 100644 index 00000000..914fb118 --- /dev/null +++ b/projects/freedesktop.org/pkg-config/versions.ts @@ -0,0 +1,6 @@ +import { SemVer } from "brewkit"; + +export default function () { + //TODO gitlab API + return [{ version: new SemVer("0.29.2"), tag: "pkg-config-0.29.2" }]; +} diff --git a/projects/git-scm.org/README.md b/projects/git-scm.org/README.md new file mode 100644 index 00000000..36c384e5 --- /dev/null +++ b/projects/git-scm.org/README.md @@ -0,0 +1,14 @@ +# Git + +## Configuration + +Though `~/.gitconfig` is the default, `git` will first look for +`${XDG_CONFIG_HOME:-$HOME/.config}/git/config` and use 
that if it exists which +allows XDG compliance. + +Our Git is otherwise configured to use `/etc/git` for system level +configuration. + +A “global” `gitignore` is sourced from +`${XDG_DATA_HOME:-$HOME/.local/share}/git/ignore`. + diff --git a/projects/git-scm.org/build.ts b/projects/git-scm.org/build.ts new file mode 100644 index 00000000..21595f70 --- /dev/null +++ b/projects/git-scm.org/build.ts @@ -0,0 +1,36 @@ +import { BuildOptions, inreplace, Path, run, unarchive } from "brewkit"; + +export default async function ({ prefix, version, props }: BuildOptions) { + await unarchive( + `https://mirrors.edge.kernel.org/pub/software/scm/git/git-${version}.tar.xz`, + ); + + props.join("config.mak").cp({ into: Path.cwd() }); + + run`./configure --prefix=${prefix}`; + run`make install + --jobs ${navigator.hardwareConcurrency} + template_dir=share/git-core/templates + NO_TCLTK=1 + INSTALL_STRIP=-s + `; + // ^^ setting `template_dir` is necessary or the resulting binary fails to be relocatable for eg. `git clone` + run`make --directory contrib/subtree install`; + + Path.cwd().join("contrib/subtree/git-subtree").mv({ + into: prefix.join("bin"), + }); + + if (Deno.build.os == "darwin") { + run`make --directory contrib/credential/osxkeychain`; + + Path.cwd().join("contrib/credential/osxkeychain/git-credential-osxkeychain") + .mv({ into: prefix.join("bin") }); + + inreplace( + prefix.join("libexec/git-core/git-instaweb"), + "#!/usr/bin/env python", + "#!/usr/bin/python3", + ); + } +} diff --git a/projects/git-scm.org/config.mak b/projects/git-scm.org/config.mak new file mode 100644 index 00000000..d20025ac --- /dev/null +++ b/projects/git-scm.org/config.mak @@ -0,0 +1,5 @@ +RUNTIME_PREFIX = YesPlease +RUNTIME_PREFIX_PERL = YesPlease + +# must be set or the above are still abs paths +gitexecdir=libexec/git-core diff --git a/projects/git-scm.org/package.yml b/projects/git-scm.org/package.yml new file mode 100644 index 00000000..965ad61b --- /dev/null +++ b/projects/git-scm.org/package.yml @@ -0,0 +1,21 @@ +repository: + https://github.com/git/git + +name: + Git + +# we ignore the other programs as they are mostly misplaced in `bin` rather +# than libexec which is where all the other `git-foo` programs are. 
+programs: + - git + +# indeed macOS has no deps, notably CommonCrypto instead of OpenSSL +linux: + dependencies: + curl.se: ">=5" +# gnu.org/gettext^0.21 +# libexpat.github.io + +platforms: + - darwin + - linux diff --git a/projects/git-scm.org/test.ts b/projects/git-scm.org/test.ts new file mode 100644 index 00000000..f0645fc5 --- /dev/null +++ b/projects/git-scm.org/test.ts @@ -0,0 +1,24 @@ +import { assert } from "jsr:@std/assert@^1"; +import { run, test_utils } from "brewkit"; +const { tmp, getstderr, asset_stdout } = test_utils; + +export default async function () { + await tmp(async () => { + const stderr = await getstderr("git clone https://github.com/pkgxdev/pkgo"); + assert(!stderr.includes("warning: templates not found in")); + }); + + await tmp(async (cwd) => { + cwd.join("testfile").touch(); + run`git init`; + run`git add .`; + asset_stdout("git diff --name-only --cached", "testfile"); + + run`git config user.email me@example.com`; + run`git config user.name MyName`; + run`git config commit.gpgSign false`; + run`git commit --message MyMessage`; + + run`git subtree add --prefix teaxyz-subtree https://github.com/pkgxdev/pkgo main --squash`; + }); +} diff --git a/projects/git-scm.org/versions.ts b/projects/git-scm.org/versions.ts new file mode 100644 index 00000000..870060ef --- /dev/null +++ b/projects/git-scm.org/versions.ts @@ -0,0 +1,19 @@ +import { Range } from "jsr:@std/semver@^1"; +import { github } from "brewkit"; + +export default async function (constraint: Range) { + return (await github.tags("git/git")).map((x) => { + try { + if (!x.name.startsWith("v") || /-.*$/.test(x.name)) { + // ignore tags for other products eg. gitgui + return; + } + return github.std_version_covert(x); + } catch (err) { + if (!/^v[01]/.test(x.name)) { + // ^^ ignore weird old tags + throw err; + } + } + }).filter((x) => x); +} diff --git a/projects/github.com/jqlang/jq/build.ts b/projects/github.com/jqlang/jq/build.ts new file mode 100644 index 00000000..4b24ee95 --- /dev/null +++ b/projects/github.com/jqlang/jq/build.ts @@ -0,0 +1,9 @@ +import { BuildOptions, run, unarchive } from "brewkit"; + +export default async function ({ prefix, tag }: BuildOptions) { + await unarchive(`https://github.com/jqlang/jq/releases/download/${tag}/${tag}.tar.gz`); + + run`./configure --disable-maintainer-mode --prefix=${prefix}`; + run`make -j ${navigator.hardwareConcurrency}`; + run`make install`; +} diff --git a/projects/github.com/jqlang/jq/package.yml b/projects/github.com/jqlang/jq/package.yml new file mode 100644 index 00000000..50c7cf1c --- /dev/null +++ b/projects/github.com/jqlang/jq/package.yml @@ -0,0 +1,12 @@ +name: + jq + +repository: + https://github.com/stedolan/jq + +programs: + - bin/jq + +platforms: + - darwin + - linux diff --git a/projects/github.com/jqlang/jq/test.json b/projects/github.com/jqlang/jq/test.json new file mode 100644 index 00000000..34c480e9 --- /dev/null +++ b/projects/github.com/jqlang/jq/test.json @@ -0,0 +1,12 @@ +{ + "devs": [ + { + "name": "Max Howell", + "github": "mxcl" + }, + { + "name": "Jacob Heider", + "github": "jhheider" + } + ] +} diff --git a/projects/github.com/jqlang/jq/test.ts b/projects/github.com/jqlang/jq/test.ts new file mode 100644 index 00000000..37c46b5e --- /dev/null +++ b/projects/github.com/jqlang/jq/test.ts @@ -0,0 +1,10 @@ +import { assertEquals } from "jsr:@std/assert@^1/equals"; + +export default async function () { + const proc = new Deno.Command("jq", { args: [".devs[1].github"], stdin: "piped", stdout: "piped" }).spawn(); + const stdin = 
proc.stdin.getWriter(); + stdin.write(new TextEncoder().encode(Deno.readTextFileSync("test.json"))); + stdin.close(); + const out = new TextDecoder().decode((await proc.output()).stdout); + assertEquals(out, '"jhheider"\n'); +} diff --git a/projects/github.com/libexpat/build.ts b/projects/github.com/libexpat/build.ts new file mode 100644 index 00000000..f1347693 --- /dev/null +++ b/projects/github.com/libexpat/build.ts @@ -0,0 +1,10 @@ +import { BuildOptions, run } from "brewkit"; +import unarchive from "../../../brewkit/unarchive.ts"; + +export default async function ({ prefix, tag, version }: BuildOptions) { + await unarchive( + `https://github.com/libexpat/libexpat/releases/download/${tag}/expat-${version}.tar.xz`, + ); + run`./configure --prefix=${prefix} --disable-debug`; + run`make --jobs ${navigator.hardwareConcurrency} install`; +} diff --git a/projects/github.com/libexpat/package.yml b/projects/github.com/libexpat/package.yml new file mode 100644 index 00000000..efa8d714 --- /dev/null +++ b/projects/github.com/libexpat/package.yml @@ -0,0 +1,12 @@ +name: + Expat + +repository: + https://github.com/libexpat/libexpat + +programs: + - bin/xmlwf + +platforms: + - darwin + - linux diff --git a/projects/github.com/libexpat/test.c b/projects/github.com/libexpat/test.c new file mode 100644 index 00000000..e572a30f --- /dev/null +++ b/projects/github.com/libexpat/test.c @@ -0,0 +1,27 @@ +#include +#include "expat.h" +static void XMLCALL my_StartElementHandler( + void *userdata, + const XML_Char *name, + const XML_Char **atts) +{ + printf("tag:%s|", name); +} +static void XMLCALL my_CharacterDataHandler( + void *userdata, + const XML_Char *s, + int len) +{ + printf("data:%.*s|", len, s); +} +int main() +{ + static const char str[] = "Hello, world!"; + int result; + XML_Parser parser = XML_ParserCreate("utf-8"); + XML_SetElementHandler(parser, my_StartElementHandler, NULL); + XML_SetCharacterDataHandler(parser, my_CharacterDataHandler); + result = XML_Parse(parser, str, sizeof(str), 1); + XML_ParserFree(parser); + return result; +} diff --git a/projects/github.com/libexpat/test.ts b/projects/github.com/libexpat/test.ts new file mode 100644 index 00000000..500d4e0d --- /dev/null +++ b/projects/github.com/libexpat/test.ts @@ -0,0 +1,8 @@ +import { backticks, run } from "brewkit"; +import { assertEquals } from "jsr:@std/assert@^1/equals"; + +export default async function test() { + run`cc test.c -lexpat`; + const out = await backticks`./a.out`; + assertEquals(out, "tag:str|data:Hello, world!|"); +} diff --git a/projects/github.com/libexpat/versions.ts b/projects/github.com/libexpat/versions.ts new file mode 100644 index 00000000..1e6c58e8 --- /dev/null +++ b/projects/github.com/libexpat/versions.ts @@ -0,0 +1,12 @@ +import { github, Range, semver } from "brewkit"; + +export default async function (constraint: Range) { + return (await github.releases("libexpat/libexpat", constraint)).compact( + ({ tag_name: tag }) => { + const version = semver.parse(tag.replace(/^R_/, "").replaceAll("_", ".")); + if (version) { + return { version, tag }; + } + }, + ); +} diff --git a/projects/gnu.org/bison/build.ts b/projects/gnu.org/bison/build.ts new file mode 100644 index 00000000..5f96f8eb --- /dev/null +++ b/projects/gnu.org/bison/build.ts @@ -0,0 +1,13 @@ +import { BuildOptions, run, unarchive } from "brewkit"; +import env_include from "../../../brewkit/env-include.ts"; + +export default async function ({ prefix, tag }: BuildOptions) { + await 
unarchive(`https://ftp.gnu.org/gnu/bison/bison-${tag.slice(1)}.tar.xz`); + + if (Deno.build.os != "darwin") { + env_include("gnu.org/m4"); + } + + run`./configure --prefix=${prefix} --enable-relocatable`; + run`make --jobs ${navigator.hardwareConcurrency} install`; +} diff --git a/projects/gnu.org/bison/package.yml b/projects/gnu.org/bison/package.yml new file mode 100644 index 00000000..636209fc --- /dev/null +++ b/projects/gnu.org/bison/package.yml @@ -0,0 +1,12 @@ +name: + GNU Bison + +homepage: + https://www.gnu.org/software/bison/ + +repository: + https://github.com/akimd/bison + +programs: + - bin/bison + - bin/yacc diff --git a/projects/gnu.org/bison/versions.ts b/projects/gnu.org/bison/versions.ts new file mode 100644 index 00000000..3bfc738d --- /dev/null +++ b/projects/gnu.org/bison/versions.ts @@ -0,0 +1,13 @@ +import { github, semver } from "brewkit"; + +export default async function () { + return (await github.tags("akimd/bison")).compact(({ name: tag }) => { + const version = semver.parse(tag.replace(/^gcc-/, "")); + if (version) { + return { + tag, + version, + }; + } + }); +} diff --git a/projects/gnu.org/gettext/build.ts b/projects/gnu.org/gettext/build.ts new file mode 100644 index 00000000..661b15fe --- /dev/null +++ b/projects/gnu.org/gettext/build.ts @@ -0,0 +1,19 @@ +import { BuildOptions, inreplace, run, unarchive } from "brewkit"; + +export default async function ({ prefix, tag }: BuildOptions) { + await unarchive(`https://ftp.gnu.org/gnu/gettext/gettext-${tag.slice(1)}.tar.gz`); + run`./configure + --prefix=${prefix} + --disable-debug + --with-included-debug + --with-included-libcroco + --with-included-libunistring + --without-included-libxml + --disable-java + --disable-csharp`; + run`make --jobs ${navigator.hardwareConcurrency}`; + run`make install`; + + inreplace(prefix.bin.join("gettextize"), prefix.string, '"$(cd "$(dirname "$0")/.." && pwd)"'); + inreplace(prefix.bin.join("autopoint"), prefix.string, '"$(cd "$(dirname "$0")/.." 
&& pwd)"'); +} diff --git a/projects/gnu.org/gettext/package.yml b/projects/gnu.org/gettext/package.yml new file mode 100644 index 00000000..a820a1a3 --- /dev/null +++ b/projects/gnu.org/gettext/package.yml @@ -0,0 +1,38 @@ +name: GNU gettext + +homepage: + https://www.gnu.org/software/gettext/ + +#dependencies: +#gnome.org/libxml2: 2 +#TODO ncurses +companions: + - tukaani.org/xz # autopoint needs this to unpack archives + +programs: + - bin/autopoint + - bin/envsubst + - bin/gettext + - bin/gettext.sh + - bin/gettextize + - bin/msgattrib + - bin/msgcat + - bin/msgcmp + - bin/msgcomm + - bin/msgconv + - bin/msgen + - bin/msgexec + - bin/msgfilter + - bin/msgfmt + - bin/msggrep + - bin/msginit + - bin/msgmerge + - bin/msgunfmt + - bin/msguniq + - bin/ngettext + - bin/recode-sr-latin + - bin/xgettext + +platforms: + - darwin + - linux diff --git a/projects/gnu.org/gettext/test.ts b/projects/gnu.org/gettext/test.ts new file mode 100644 index 00000000..202fdcb6 --- /dev/null +++ b/projects/gnu.org/gettext/test.ts @@ -0,0 +1,6 @@ +import { assertEquals } from "jsr:@std/assert@^1/equals"; +import { backticks } from "brewkit"; + +export default async function () { + assertEquals(await backticks`gettext test`, "test"); +} diff --git a/projects/gnu.org/gettext/versions.ts b/projects/gnu.org/gettext/versions.ts new file mode 100644 index 00000000..b7201073 --- /dev/null +++ b/projects/gnu.org/gettext/versions.ts @@ -0,0 +1,13 @@ +import { github, SemVer, semver } from "brewkit"; + +export default async function () { + return (await github.tags("autotools-mirror/gettext")).compact(({ name: tag }) => { + const version = semver.parse(tag); + if (version) { + return { + tag, + version, + }; + } + }); +} diff --git a/projects/gnu.org/gmp/build.ts b/projects/gnu.org/gmp/build.ts new file mode 100644 index 00000000..95581a1c --- /dev/null +++ b/projects/gnu.org/gmp/build.ts @@ -0,0 +1,17 @@ +import { BuildOptions, env_include, run, unarchive } from "brewkit"; + +export default async function ({ prefix, tag }: BuildOptions) { + await unarchive(`https://gmplib.org/download/gmp/${tag}.tar.bz2`); + + if (Deno.build.os != "darwin") { + env_include("gnu.org/m4"); + } + + run`./configure + --enable-cxx + --with-pic + --prefix=${prefix}`; + run`make --jobs ${navigator.hardwareConcurrency}`; + run`make --jobs ${navigator.hardwareConcurrency} check`; + run`make --jobs ${navigator.hardwareConcurrency} install`; +} diff --git a/projects/gnu.org/gmp/package.yml b/projects/gnu.org/gmp/package.yml new file mode 100644 index 00000000..22e8f282 --- /dev/null +++ b/projects/gnu.org/gmp/package.yml @@ -0,0 +1,9 @@ +name: + GMP + +homepage: + https://gmplib.org + +platforms: + - darwin + - linux diff --git a/projects/gnu.org/gmp/versions.ts b/projects/gnu.org/gmp/versions.ts new file mode 100644 index 00000000..ba6c06a0 --- /dev/null +++ b/projects/gnu.org/gmp/versions.ts @@ -0,0 +1,14 @@ +import { SemVer } from "brewkit"; + +export default async function () { + const rsp = await fetch("https://gmplib.org/download/gmp/"); + const txt = await rsp.text(); + const matches = txt.matchAll(/(gmp-(\d+(\.\d+)+))\.tar\.bz2/mg); + const rv = []; + for (const match of matches) { + const tag = match[1]; + const version = new SemVer(match[2]); + rv.push({ version, tag }); + } + return rv; +} diff --git a/projects/gnu.org/libtool/build.ts b/projects/gnu.org/libtool/build.ts new file mode 100644 index 00000000..942e3ad2 --- /dev/null +++ b/projects/gnu.org/libtool/build.ts @@ -0,0 +1,20 @@ +import { BuildOptions, inreplace, run, unarchive 
} from "brewkit"; +import env_include from "../../../brewkit/env-include.ts"; + +export default async function ({ prefix, tag }: BuildOptions) { + await unarchive(`https://ftp.gnu.org/gnu/libtool/libtool-${tag}.tar.gz`); + + if (Deno.build.os == "linux") { + await env_include("m4"); + } + + run`./configure --prefix=${prefix} --disable-debug`; + run`make --jobs ${navigator.hardwareConcurrency} install`; + + inreplace(prefix.bin.join("libtoolize"), `prefix="${prefix}"`, 'prefix="$(cd "$(dirname "$0")/.." && pwd)"'); + inreplace(prefix.bin.join("libtoolize"), prefix.string, "$prefix"); + + // commonly expected prefixes + prefix.bin.join("glibtool").ln("s", { target: "libtool" }); + prefix.bin.join("glibtoolize").ln("s", { target: "libtoolize" }); +} diff --git a/projects/gnu.org/libtool/package.yml b/projects/gnu.org/libtool/package.yml new file mode 100644 index 00000000..6c4bf584 --- /dev/null +++ b/projects/gnu.org/libtool/package.yml @@ -0,0 +1,13 @@ +name: + GNU Libtool + +homepage: + https://www.gnu.org/software/libtool/ + +programs: + - bin/libtool + - bin/libtoolize + +platforms: + - darwin + - linux diff --git a/projects/gnu.org/libtool/test.ts b/projects/gnu.org/libtool/test.ts new file mode 100644 index 00000000..cd47086a --- /dev/null +++ b/projects/gnu.org/libtool/test.ts @@ -0,0 +1,7 @@ +import { assert } from "jsr:@std/assert@^1"; +import { Path, run } from "brewkit"; + +export default async function () { + run`libtoolize`; + assert(Path.cwd().join("ltmain.sh").exists()); +} diff --git a/projects/gnu.org/libtool/versions.ts b/projects/gnu.org/libtool/versions.ts new file mode 100644 index 00000000..4c69ee2d --- /dev/null +++ b/projects/gnu.org/libtool/versions.ts @@ -0,0 +1,14 @@ +import { SemVer } from "brewkit"; + +export default async function () { + const rsp = await fetch("https://ftp.gnu.org/gnu/libtool/"); + const txt = await rsp.text(); + const matches = txt.matchAll(/libtool-(\d+(\.\d+)+)\.tar\.gz/mg); + const rv = []; + for (const match of matches) { + const tag = match[1]; + const version = new SemVer(match[1]); + rv.push({ version, tag }); + } + return rv; +} diff --git a/projects/gnu.org/m4/build.ts b/projects/gnu.org/m4/build.ts new file mode 100644 index 00000000..0c0239b6 --- /dev/null +++ b/projects/gnu.org/m4/build.ts @@ -0,0 +1,7 @@ +import { BuildOptions, run, unarchive } from "brewkit"; + +export default async function ({ prefix, version }: BuildOptions) { + await unarchive(`https://ftp.gnu.org/gnu/m4/m4-${version}.tar.xz`); + run`./configure --prefix=${prefix}`; + run`make --jobs ${navigator.hardwareConcurrency} install`; +} diff --git a/projects/gnu.org/m4/package.yml b/projects/gnu.org/m4/package.yml new file mode 100644 index 00000000..a47842a1 --- /dev/null +++ b/projects/gnu.org/m4/package.yml @@ -0,0 +1,15 @@ +name: + GNU m4 + +repository: + https://git.savannah.gnu.org/r/m4.git/ + +homepage: + https://www.gnu.org/software/m4/ + +programs: + - bin/m4 + +platforms: + - darwin + - linux diff --git a/projects/gnu.org/m4/test.ts b/projects/gnu.org/m4/test.ts new file mode 100644 index 00000000..dc4fc673 --- /dev/null +++ b/projects/gnu.org/m4/test.ts @@ -0,0 +1,10 @@ +import { assertEquals } from "jsr:@std/assert@^1/equals"; + +export default async function () { + const proc = new Deno.Command("m4", { stdin: "piped", stdout: "piped" }).spawn(); + const stdin = proc.stdin.getWriter(); + stdin.write(new TextEncoder().encode(`define(TEST, pkgx.dev)TEST`)); + stdin.close(); + const stdout = new TextDecoder().decode((await proc.output()).stdout); + 
assertEquals(stdout, "pkgx.dev"); +} diff --git a/projects/gnu.org/m4/versions.ts b/projects/gnu.org/m4/versions.ts new file mode 100644 index 00000000..b783f2df --- /dev/null +++ b/projects/gnu.org/m4/versions.ts @@ -0,0 +1,16 @@ +import { github, semver } from "brewkit"; + +export default async function () { + return (await github.tags("autotools-mirror/m4")).compact(({ name }) => { + if (/[a-z]$/.test(name)) return; + if (/^m4-/.test(name)) name = name.slice(3); + if (/^release-/.test(name)) name = name.slice(8); + const version = semver.parse(name); + if (version) { + return { + tag: name, + version, + }; + } + }); +} diff --git a/projects/gnu.org/make/build.ts b/projects/gnu.org/make/build.ts new file mode 100644 index 00000000..2491bd98 --- /dev/null +++ b/projects/gnu.org/make/build.ts @@ -0,0 +1,7 @@ +import { BuildOptions, run, unarchive } from "brewkit"; + +export default async function ({ prefix, tag }: BuildOptions) { + await unarchive(`https://ftp.gnu.org/gnu/make/make-${tag}.tar.gz`); + run`./configure --prefix=${prefix}`; + run`make --jobs ${navigator.hardwareConcurrency} install`; +} diff --git a/projects/gnu.org/make/package.yml b/projects/gnu.org/make/package.yml new file mode 100644 index 00000000..e0c31b74 --- /dev/null +++ b/projects/gnu.org/make/package.yml @@ -0,0 +1,9 @@ +name: + GNU Make + +programs: + - bin/make + +platforms: + - darwin + - linux diff --git a/projects/gnu.org/make/test.ts b/projects/gnu.org/make/test.ts new file mode 100644 index 00000000..4101237e --- /dev/null +++ b/projects/gnu.org/make/test.ts @@ -0,0 +1,9 @@ +import { assertEquals } from "jsr:@std/assert@^1/equals"; +import { run } from "brewkit"; + +export default async function () { + Deno.writeTextFileSync("Makefile", "foo:\n\techo bar > $@"); + run`make`; + assertEquals(Deno.readTextFileSync("foo"), "bar\n"); + run`make --question`; +} diff --git a/projects/gnu.org/make/versions.ts b/projects/gnu.org/make/versions.ts new file mode 100644 index 00000000..af034835 --- /dev/null +++ b/projects/gnu.org/make/versions.ts @@ -0,0 +1,14 @@ +import { SemVer } from "brewkit"; + +export default async function () { + const rsp = await fetch("https://ftp.gnu.org/gnu/make/"); + const txt = await rsp.text(); + const matches = txt.matchAll(/make-(\d+(\.\d+)+)\.tar\.gz/mg); + const rv = []; + for (const match of matches) { + const tag = match[1]; + const version = new SemVer(match[1]); + rv.push({ version, tag }); + } + return rv; +} diff --git a/projects/gnu.org/patch/build.ts b/projects/gnu.org/patch/build.ts new file mode 100644 index 00000000..af9923e6 --- /dev/null +++ b/projects/gnu.org/patch/build.ts @@ -0,0 +1,7 @@ +import { BuildOptions, run, unarchive } from "brewkit"; + +export default async function ({ prefix, tag }: BuildOptions) { + await unarchive(`https://ftp.gnu.org/gnu/patch/patch-${tag}.tar.gz`); + run`./configure --prefix=${prefix}`; + run`make --jobs ${navigator.hardwareConcurrency} install`; +} diff --git a/projects/gnu.org/patch/package.yml b/projects/gnu.org/patch/package.yml new file mode 100644 index 00000000..7418e158 --- /dev/null +++ b/projects/gnu.org/patch/package.yml @@ -0,0 +1,19 @@ +distributable: + url: https://ftp.gnu.org/gnu/patch/patch-{{version}}.tar.gz + strip-components: 1 + +versions: + url: https://ftp.gnu.org/gnu/patch/ + match: /patch-(\d+\.\d+(\.\d+)?)\.tar\.gz/ + strip: + - /patch-/ + - /.tar.gz/ + +name: GNU Patch + +programs: + - bin/patch + +platforms: + - darwin + - linux diff --git a/projects/gnu.org/patch/test.ts b/projects/gnu.org/patch/test.ts new 
file mode 100644 index 00000000..25fba8a7 --- /dev/null +++ b/projects/gnu.org/patch/test.ts @@ -0,0 +1,20 @@ +import { assert, assertEquals } from "jsr:@std/assert@^1"; + +export default async function () { + Deno.writeTextFileSync("./sample", "hello\n"); + + const proc = new Deno.Command("patch", { args: ["./sample"], stdin: "piped" }).spawn(); + const stdin = proc.stdin.getWriter(); + const data = ` +1c1 +< hello +--- +> goodbye +`; + await stdin.write(new TextEncoder().encode(data)); + await stdin.close(); + const { success } = await proc.status; + + assert(success); + assertEquals(Deno.readTextFileSync("./sample"), "goodbye\n"); +} diff --git a/projects/gnu.org/patch/versions.ts b/projects/gnu.org/patch/versions.ts new file mode 100644 index 00000000..8d7ed9e4 --- /dev/null +++ b/projects/gnu.org/patch/versions.ts @@ -0,0 +1,14 @@ +import { SemVer } from "brewkit"; + +export default async function () { + const rsp = await fetch("https://ftp.gnu.org/gnu/patch/"); + const txt = await rsp.text(); + const matches = txt.matchAll(/patch-(\d+(\.\d+)+)\.tar\.gz/mg); + const rv = []; + for (const match of matches) { + const tag = match[1]; + const version = new SemVer(match[1]); + rv.push({ version, tag }); + } + return rv; +} diff --git a/projects/gnu.org/readline/build.ts b/projects/gnu.org/readline/build.ts new file mode 100644 index 00000000..64d15c7a --- /dev/null +++ b/projects/gnu.org/readline/build.ts @@ -0,0 +1,9 @@ +import { BuildOptions, run } from "brewkit"; +import unarchive from "../../../brewkit/unarchive.ts"; + +export default async function ({ prefix, tag }: BuildOptions) { + await unarchive(`https://ftp.gnu.org/gnu/readline/readline-${tag}.tar.gz`); + + run`./configure --prefix=${prefix} --with-curses`; + run`make --jobs ${navigator.hardwareConcurrency} install`; +} diff --git a/projects/gnu.org/readline/package.yml b/projects/gnu.org/readline/package.yml new file mode 100644 index 00000000..10dfbcd6 --- /dev/null +++ b/projects/gnu.org/readline/package.yml @@ -0,0 +1,16 @@ +name: + GNU Readline + +homepage: + https://www.gnu.org/software/readline/ + +repository: + https://git.savannah.gnu.org/cgit/readline.git/ + +linux: + dependencies: + invisible-island.net/ncurses: ^6 + +platforms: + - darwin + - linux diff --git a/projects/gnu.org/readline/test.c b/projects/gnu.org/readline/test.c new file mode 100644 index 00000000..5b769d94 --- /dev/null +++ b/projects/gnu.org/readline/test.c @@ -0,0 +1,7 @@ +#include +#include +#include +int main() { + printf("%s\\n", readline("test> ")); + return 0; +} diff --git a/projects/gnu.org/readline/test.ts b/projects/gnu.org/readline/test.ts new file mode 100644 index 00000000..0317a916 --- /dev/null +++ b/projects/gnu.org/readline/test.ts @@ -0,0 +1,36 @@ +import { backticks, run, TestOptions } from "brewkit"; +import { assertEquals } from "jsr:@std/assert@^1/equals"; +import { assertMatch } from "jsr:@std/assert@^1/match"; + +export default async function ({ prefix }: TestOptions) { + switch (Deno.build.os) { + case "linux": + assertMatch( + await backticks`ldd ${prefix}/lib/libreadline.so`, + /ncurses/, + ); + run`cc -lreadline -lncurses -ltinfo test.c`; + break; + case "darwin": + assertMatch( + await backticks`otool -L ${prefix}/lib/libreadline.dylib`, + /ncurses/, + ); + run`cc -lreadline -lncurses test.c`; + break; + } + + const proc = new Deno.Command("./a.out", { stdin: "piped", stdout: "piped" }) + .spawn(); + + const writer = await proc.stdin.getWriter(); + writer.write(new TextEncoder().encode("Hello, World!\n")); + 
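+ // test.c reads a single line with readline() and prints it back after the "test> " prompt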
writer.close(); + + const out = new TextDecoder().decode((await proc.output()).stdout); + assertEquals( + out, + `test> Hello, World! +Hello, World!\\n`, + ); +} diff --git a/projects/gnu.org/readline/versions.ts b/projects/gnu.org/readline/versions.ts new file mode 100644 index 00000000..57d61560 --- /dev/null +++ b/projects/gnu.org/readline/versions.ts @@ -0,0 +1,14 @@ +import { SemVer } from "brewkit"; + +export default async function () { + const rsp = await fetch("https://ftp.gnu.org/gnu/readline/"); + const txt = await rsp.text(); + const matches = txt.matchAll(/readline-(\d+(\.\d+)+)\.tar\.gz/mg); + const rv = []; + for (const match of matches) { + const tag = match[1]; + const version = new SemVer(match[1]); + rv.push({ version, tag }); + } + return rv; +} diff --git a/projects/info-zip.org/unzip/build.ts b/projects/info-zip.org/unzip/build.ts new file mode 100644 index 00000000..bb26f81a --- /dev/null +++ b/projects/info-zip.org/unzip/build.ts @@ -0,0 +1,12 @@ +import { BuildOptions, run } from "brewkit"; +import unarchive from "../../../brewkit/unarchive.ts"; + +export default async function ({ prefix, version }: BuildOptions) { + const M = version.major; + const Mm = `${M}${version.minor}`; + await unarchive( + `https://cytranet.dl.sourceforge.net/project/infozip/UnZip ${version.major}.x (latest)/UnZip ${version.marketing}/unzip${version.major}${version.minor}.tar.gz`, + ); + run`make --file unix/Makefile --jobs ${navigator.hardwareConcurrency} macosx`; + run`make prefix=${prefix} install`; +} diff --git a/projects/info-zip.org/unzip/package.yml b/projects/info-zip.org/unzip/package.yml new file mode 100644 index 00000000..574ef638 --- /dev/null +++ b/projects/info-zip.org/unzip/package.yml @@ -0,0 +1,16 @@ +name: + unzip + +homepage: + https://info-zip.org + +programs: + - bin/unzip + - bin/funzip + - bin/unzipsfx + - bin/zipgrep + - bin/zipinfo + +platforms: + - darwin + - linux diff --git a/projects/info-zip.org/unzip/test.ts b/projects/info-zip.org/unzip/test.ts new file mode 100644 index 00000000..83e460e2 --- /dev/null +++ b/projects/info-zip.org/unzip/test.ts @@ -0,0 +1,5 @@ +import { run } from "brewkit"; + +export default function () { + run`unzip -h`; +} diff --git a/projects/info-zip.org/unzip/versions.ts b/projects/info-zip.org/unzip/versions.ts new file mode 100644 index 00000000..80221f96 --- /dev/null +++ b/projects/info-zip.org/unzip/versions.ts @@ -0,0 +1,5 @@ +import { SemVer } from "brewkit"; + +export default function () { + return [{ version: new SemVer("6.0.0"), tag: "6.0" }]; +} diff --git a/projects/invisible-island.net/ncurses/build.ts b/projects/invisible-island.net/ncurses/build.ts new file mode 100644 index 00000000..adbec314 --- /dev/null +++ b/projects/invisible-island.net/ncurses/build.ts @@ -0,0 +1,37 @@ +import { BuildOptions, inreplace, run, unarchive } from "brewkit"; + +export default async function ({ prefix, tag }: BuildOptions) { + await unarchive( + `https://ftp.gnu.org/gnu/ncurses/ncurses-${tag.replace(/^v/, "")}.tar.gz`, + ); + + run`./configure + --prefix=${prefix} + --disable-debug + --enable-pc-files + --enable-sigwinch + --enable-widec + --with-shared + --with-cxx-shared + --with-gpm=no + --without-ada + --with-pkg-config-libdir=${prefix}/lib/pkgconfig + --with-termlib + --with-terminfo-dirs=/usr/share/terminfo:/usr/local/share/terminfo + --without-tests + `; + + run`make --jobs ${navigator.hardwareConcurrency} install`; + + inreplace( + prefix.join("bin/ncursesw6-config"), + `prefix="${prefix}"`, + `prefix="$(cd "$(dirname "$0")/.." 
&& pwd)"`, + ); + + inreplace( + prefix.join("bin/ncursesw6-config"), + prefix.string, + "$prefix", + ); +} diff --git a/projects/invisible-island.net/ncurses/package.yml b/projects/invisible-island.net/ncurses/package.yml new file mode 100644 index 00000000..bb99c2f7 --- /dev/null +++ b/projects/invisible-island.net/ncurses/package.yml @@ -0,0 +1,26 @@ +name: + ncurses + +homepage: + https://invisible-island.net/ncurses/ + +programs: + - bin/captoinfo + - bin/clear + - bin/infocmp + - bin/infotocap + - bin/ncursesw6-config + - bin/reset + - bin/tabs + - bin/tic + - bin/toe + - bin/tput + - bin/tset + +env: + TERMINFO_DIRS: /usr/share/terminfo:{{prefix}}/share/terminfo:$TERMINFO_DIRS + # ^^ we delegate to the system first since they may apply platform specific info + +platforms: + - darwin + - linux diff --git a/projects/invisible-island.net/ncurses/test.ts b/projects/invisible-island.net/ncurses/test.ts new file mode 100644 index 00000000..20becc0b --- /dev/null +++ b/projects/invisible-island.net/ncurses/test.ts @@ -0,0 +1,19 @@ +import { assertMatch } from "jsr:@std/assert@^1/match"; +import { backticks, TestOptions } from "brewkit"; + +async function assertOutputMatch(cmd: string, regex: RegExp) { + assertMatch(await backticks`${cmd}`, regex); +} + +export default async function ({ version }: TestOptions) { + await assertOutputMatch( + "ncursesw6-config --version", + new RegExp(`^${version.major}\.${version.minor}\.`), + ); + + // pkg-config --modversion ncursesw | grep {{version.marketing}} + // pkg-config --libs ncursesw | grep '{{prefix}}' + + // https://github.com/pkgxdev/pantry/issues/1658 + // tmux -c ls +} diff --git a/projects/invisible-island.net/ncurses/versions.ts b/projects/invisible-island.net/ncurses/versions.ts new file mode 100644 index 00000000..ceb887ee --- /dev/null +++ b/projects/invisible-island.net/ncurses/versions.ts @@ -0,0 +1,10 @@ +import { github, semver } from "brewkit"; + +export default async function () { + return (await github.tags("mirror/ncurses")).compact( + ({ name: tag }) => { + const version = semver.parse(tag); + return { tag, version }; + }, + ); +} diff --git a/projects/llvm.org/build.ts b/projects/llvm.org/build.ts new file mode 100644 index 00000000..0d878618 --- /dev/null +++ b/projects/llvm.org/build.ts @@ -0,0 +1,142 @@ +import { BuildOptions, env_include, Path, run, SemVer, unarchive } from "brewkit"; + +// # resources +// * https://llvm.org/docs/BuildingADistribution.html + +// # TODO +// the compiler-rt stuff is really only needed for llvm-specific builds like python +// everything else builds without it, so we ideally could split that out into a subpkg +// NOTE that we use the glibc runtime package provided by debian-slim to build without +// these. Which puts that onus on end-users who want to build c programs on top of our +// llvm. 
+ +export default async function build({ prefix, version, tag }: BuildOptions) { + await unarchive( + `https://github.com/llvm/llvm-project/releases/download/${tag}/llvm-project-${version}.src.tar.xz`, + ); + + env_include("ninja cmake"); + + // build what is provided by GNU “bintools” + let tools = "llvm-ar;llvm-as;llvm-nm;llvm-objdump;llvm-size;llvm-strings;llvm-objcopy;llvm-ranlib"; + + // TODO subpkg for this, and another for the compiler-rt stuff + tools += ";llvm-profdata"; + + let platform_specific_cmake_args = ""; + let projects = "clang;lld"; + let extra_targets = ""; + + switch (Deno.build.os) { + case "linux": + env_include("python^3 llvm.org"); + + //using lld speeds things up, the other keeps us GNU glibc by default + platform_specific_cmake_args = ` + -DCLANG_DEFAULT_RTLIB=libgcc + -DCLANG_DEFAULT_LINKER=lld + `; + + // compiler-rt specific stuff + platform_specific_cmake_args += ` + -DCOMPILER_RT_DEFAULT_TARGET_ONLY=ON + -DCMAKE_C_COMPILER_TARGET=x86_64-unknown-linux-gnu + -DLLVM_ENABLE_RUNTIMES=compiler-rt + -DCOMPILER_RT_BUILD_XRAY=OFF + -DCOMPILER_RT_BUILD_LIBFUZZER=OFF + `; + + extra_targets = "compiler-rt"; + break; + + case "darwin": + platform_specific_cmake_args = ` + -DLLVM_ENABLE_EXPORTED_SYMBOLS_IN_EXECUTABLES=OFF + -DDEFAULT_SYSROOT=/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk + `; + } + + /* + CLANG_ENABLE_STATIC_ANALYZER=OFF + - recommended by the LLVM build guide + LLVM_ENABLE_Z3_SOLVER=OFF + - required for the above + */ + + run`cmake + --log-level=DEBUG + -S ./llvm + -B ./o + -G Ninja + + -DCMAKE_INSTALL_PREFIX=${prefix} + -DCMAKE_BUILD_TYPE=Release + + -DCLANG_VENDOR=pkgx + -DBUG_REPORT_URL=https://github.com/pkgxdev/pantry/issues/new + + -DLLVM_ENABLE_RTTI=ON + -DLLVM_ENABLE_LTO=Full + -DLLVM_ENABLE_ZLIB=FORCE_ON + -DLLVM_ENABLE_OCAMLDOC=OFF + -DLLVM_ENABLE_BINDINGS=OFF + -DLLVM_ENABLE_LIBEDIT=OFF + -DLLVM_ENABLE_BACKTRACES=OFF + -DLLVM_ENABLE_ASSERTIONS=OFF + -DLLVM_ENABLE_DOXYGEN=OFF + -DLLVM_ENABLE_SPHINX=OFF + + -DLLVM_INSTALL_TOOLCHAIN_ONLY=ON + + -DLLVM_INCLUDE_DOCS=OFF + -DLLVM_INCLUDE_TESTS=OFF + -DLLVM_INCLUDE_BENCHMARKS=OFF + -DLLVM_INCLUDE_EXAMPLES=OFF + + -DLLVM_BUILD_TESTS=OFF + -DCOMPILER_RT_INCLUDE_TESTS=OFF + -DCOMPILER_RT_USE_LIBCXX=OFF + + -DLLVM_ENABLE_PROJECTS=${projects} + -DLLVM_DISTRIBUTION_COMPONENTS=clang;lld;${tools} + -DLLVM_TOOLCHAIN_TOOLS=${tools} + + -DCLANG_ENABLE_STATIC_ANALYZER=OFF + -DLLVM_ENABLE_Z3_SOLVER=OFF + -DCLANG_ENABLE_ARCMT=OFF + + ${platform_specific_cmake_args} + `; + + run`ninja -C ./o distribution ${extra_targets}`; + run`ninja -C ./o install-distribution-stripped`; + if (Deno.build.os === "linux") { + run`ninja -C ./o install-compiler-rt-stripped`; + } + run`ninja -C ./o install-clang-resource-headers`; // necessary header files or builds just don’t work + + const bin = prefix.join("bin"); + kthxbai_clang_version_suffix(bin, version); + create_bintools_symlinks(bin); +} + +function kthxbai_clang_version_suffix(bin: Path, version: SemVer) { + bin.join(`clang-${version.major}`).mv({ to: bin.join("clang").rm() }); +} + +function create_bintools_symlinks(bin: Path) { + for (const tool of "as ar nm strings objdump ranlib".split(" ")) { + const target = `llvm-${tool}`; + bin.join(tool).ln("s", { target }); + } + bin.join("cc").ln("s", { target: "clang" }); + bin.join("c++").ln("s", { target: "clang++" }); + + // failure all over when using this symlink on macOS + if (Deno.build.os != "darwin") { + bin.join("ld").ln("s", { target: "lld" }); + } + + bin.join("llvm-strip").ln("s", { target: "llvm-objcopy" }); + 
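+ // llvm-objcopy dispatches on its invoked name (it doubles as llvm-strip), so linking strip to it provides strip behaviour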
bin.join("strip").ln("s", { target: "llvm-objcopy" }); +} diff --git a/projects/llvm.org/fixup.ts b/projects/llvm.org/fixup.ts new file mode 100644 index 00000000..78e38611 --- /dev/null +++ b/projects/llvm.org/fixup.ts @@ -0,0 +1,5 @@ +import { Path } from "brewkit"; + +export default function (path: Path): boolean { + return path.extname() != ".a"; +} diff --git a/projects/llvm.org/package.yml b/projects/llvm.org/package.yml new file mode 100644 index 00000000..9aafdfb6 --- /dev/null +++ b/projects/llvm.org/package.yml @@ -0,0 +1,42 @@ +repository: + https://github.com/llvm/llvm-project + +programs: + - bin/clang + - bin/clang++ + - bin/clang-cl + - bin/clang-cpp + - bin/lld + - bin/ld.lld + - bin/ld64.lld + - bin/lld-link + - bin/wasm-ld + - bin/llvm-ar + - bin/llvm-as + - bin/llvm-cov + - bin/llvm-nm + - bin/llvm-objdump + - bin/llvm-profdata + - bin/llvm-readobj + - bin/llvm-size + - bin/llvm-strings + # bintools symlinks + - bin/ar + - bin/as + - bin/cc + - bin/c++ + - bin/objdump + - bin/nm + - bin/strings + +darwin: + programs: + - bin/llvm-dwarfdump + +linux: + dependencies: + zlib.net: ^1 + +platforms: + - darwin + - linux diff --git a/projects/llvm.org/test.c b/projects/llvm.org/test.c new file mode 100644 index 00000000..6dc5a2e1 --- /dev/null +++ b/projects/llvm.org/test.c @@ -0,0 +1,6 @@ +#include +#include +int main() { + printf("Hello World!\n"); + return 0; +} diff --git a/projects/llvm.org/test.ts b/projects/llvm.org/test.ts new file mode 100644 index 00000000..6fef5ce8 --- /dev/null +++ b/projects/llvm.org/test.ts @@ -0,0 +1,8 @@ +import { backticks, run, undent } from "brewkit"; +import { assertEquals } from "jsr:@std/assert@^1/equals"; + +export default async function () { + run`clang test.c`; + const out = await backticks`./a.out`; + assertEquals(out, "Hello World!"); +} diff --git a/projects/llvm.org/versions.ts b/projects/llvm.org/versions.ts new file mode 100644 index 00000000..dbe4de07 --- /dev/null +++ b/projects/llvm.org/versions.ts @@ -0,0 +1,12 @@ +import { github, Range, SemVer, semver } from "brewkit"; + +export default async function (constraint: Range) { + return (await github.releases("llvm/llvm-project", constraint)).compact( + ({ tag_name: tag }) => { + const version = semver.parse(tag.replace(/^llvmorg-/, "")); + if (version) { + return { version, tag }; + } + }, + ); +} diff --git a/projects/nasm.us/build.ts b/projects/nasm.us/build.ts new file mode 100644 index 00000000..c7a08a69 --- /dev/null +++ b/projects/nasm.us/build.ts @@ -0,0 +1,18 @@ +import { BuildOptions, env_include, Path, run, unarchive } from "brewkit"; + +export default async function ({ prefix, tag }: BuildOptions) { + await unarchive(`https://www.nasm.us/pub/nasm/releasebuilds/${tag}/nasm-${tag}.tar.xz`); + + if (Deno.build.os != "windows") { + run`./configure --prefix=${prefix}`; + run`make --jobs ${navigator.hardwareConcurrency}`; + run`make install`; + } else { + run`nmake /f Mkfiles/msvc.mak`; + + Path.cwd().join("nasm.exe").cp({ into: prefix.bin.mkdir('p') }); + Path.cwd().join("ndisasm.exe").cp({ into: prefix.bin }); + Path.cwd().join("include").cp({ into: prefix }); + Path.cwd().join("libnasm.lib").cp({ into: prefix.join('lib').mkdir() }); + } +} diff --git a/projects/nasm.us/package.yml b/projects/nasm.us/package.yml new file mode 100644 index 00000000..f3fed8fc --- /dev/null +++ b/projects/nasm.us/package.yml @@ -0,0 +1,35 @@ +name: + NASM + +unabbreviated-name: Netwide Assembler + +distributable: + url: 
https://www.nasm.us/pub/nasm/releasebuilds/{{version.raw}}/nasm-{{version.raw}}.tar.xz + strip-components: 1 + +versions: + url: https://www.nasm.us/pub/nasm/releasebuilds/ + match: /"\d+\.\d+(\.\d+)?\/"/ + strip: + - /^"/ + - /\/"$/ + +build: + script: + - ./configure --prefix="{{prefix}}" + - run: | + make --jobs {{hw.concurrency}} rdf + make install install_rdf + if: <2.16 + # rdoff tools removed as unfixable + # https://github.com/netwide-assembler/nasm/commit/93548c2de2a3c218b3d0ab4061b26d9781cb6b37 + - run: | + make --jobs {{hw.concurrency}} + make install + if: '>=2.16' + +test: nasm --version + +programs: + - bin/nasm + - bin/ndisasm diff --git a/projects/nasm.us/versions.ts b/projects/nasm.us/versions.ts new file mode 100644 index 00000000..08c97d68 --- /dev/null +++ b/projects/nasm.us/versions.ts @@ -0,0 +1,14 @@ +import { SemVer } from "brewkit"; + +export default async function () { + const rsp = await fetch("https://www.nasm.us/pub/nasm/releasebuilds/"); + const txt = await rsp.text(); + const matches = txt.matchAll(/"(\d+\.\d+(\.\d+))?\/"/mg); + const rv = []; + for (const match of matches) { + const tag = match[1]; + const version = new SemVer(match[1]); + rv.push({ version, tag }); + } + return rv; +} diff --git a/projects/ninja-build.org/build.ts b/projects/ninja-build.org/build.ts new file mode 100644 index 00000000..3e5715cb --- /dev/null +++ b/projects/ninja-build.org/build.ts @@ -0,0 +1,14 @@ +import { BuildOptions, run, unarchive } from "brewkit"; +import env_include from "../../brewkit/env-include.ts"; + +export default async function ({ prefix, tag }: BuildOptions) { + await unarchive(`https://github.com/ninja-build/ninja/archive/refs/tags/${tag}.tar.gz`); + + if (Deno.build.os != "darwin") { + await env_include("python^3"); + } + + run`./configure.py --bootstrap`; + + prefix.bin.install("ninja"); +} diff --git a/projects/ninja-build.org/package.yml b/projects/ninja-build.org/package.yml new file mode 100644 index 00000000..6bce1997 --- /dev/null +++ b/projects/ninja-build.org/package.yml @@ -0,0 +1,12 @@ +name: + Ninja + +programs: + - bin/ninja + +repository: + https://github.com/ninja-build/ninja + +platforms: + - darwin + - linux diff --git a/projects/ninja-build.org/test.ts b/projects/ninja-build.org/test.ts new file mode 100644 index 00000000..02e5bc04 --- /dev/null +++ b/projects/ninja-build.org/test.ts @@ -0,0 +1,5 @@ +import { run } from "brewkit"; + +export default async function () { + run`ninja --version`; +} diff --git a/projects/nixos.org/patchelf/build.ts b/projects/nixos.org/patchelf/build.ts new file mode 100644 index 00000000..c14120e1 --- /dev/null +++ b/projects/nixos.org/patchelf/build.ts @@ -0,0 +1,8 @@ +import { BuildOptions, run, unarchive } from "brewkit"; + +export default async function ({ prefix, version }: BuildOptions) { + await unarchive(`https://github.com/NixOS/patchelf/releases/download/${version}/patchelf-${version}.tar.bz2`); + + run`./configure --prefix=${prefix}`; + run`make --jobs ${navigator.hardwareConcurrency} install`; +} diff --git a/projects/nixos.org/patchelf/package.yml b/projects/nixos.org/patchelf/package.yml new file mode 100644 index 00000000..8c54d0ce --- /dev/null +++ b/projects/nixos.org/patchelf/package.yml @@ -0,0 +1,12 @@ +name: + patchelf + +repository: + https://github.com/NixOS/patchelf + +programs: + - bin/patchelf + +platforms: + - darwin + - linux diff --git a/projects/nixos.org/patchelf/test.ts b/projects/nixos.org/patchelf/test.ts new file mode 100644 index 00000000..0de2aee0 --- /dev/null +++ 
b/projects/nixos.org/patchelf/test.ts @@ -0,0 +1,5 @@ +import { run } from "brewkit"; + +export default async function () { + run`patchelf --version`; // FIXME better +} diff --git a/projects/openssl.org/README.md b/projects/openssl.org/README.md new file mode 100644 index 00000000..7ac89f95 --- /dev/null +++ b/projects/openssl.org/README.md @@ -0,0 +1,31 @@ +# OpenSSL + +## Configuration Files + +- `/etc/ssl/openssl.cnf` +- `$OPENSSL_CONF` +- `$OPENSSL_HOME/openssl.cnf` + +## CA Certificates + +- On macOS and Windows the system provides Certificate Authorities. +- On *nix, CA certificates are provided by [curl.se/ca-certs]. + +We build OpenSSL with the following search path: + +- The `SSL_CERT_FILE` environment variable +- `$PREFIX/share/ca-certs.pem` +- `/etc/ssl/cert.pem` + +Programs that use OpenSSL will find the CA Certificates automatically. + +If for some reason a program does not, you can usually point it at the bundle +with an environment variable; failing that, the default lookup is +`/etc/ssl/cert.pem`, so you could copy the certs there. + +`pkgx` will set `SSL_CERT_FILE` automatically when invoked. You can override +this either by setting it yourself (in which case `pkgx` respects your value), +or by running programs _without_ ca-certs, either by disabling companions or +by removing it explicitly. + +[curl.se/ca-certs]: https://pkgx.dev/pkgs/curl.se/ca-certs diff --git a/projects/openssl.org/build.ts b/projects/openssl.org/build.ts new file mode 100644 index 00000000..4ee080e6 --- /dev/null +++ b/projects/openssl.org/build.ts @@ -0,0 +1,56 @@ +import { env_include, BuildOptions, Range, run, SemVer, unarchive } from "brewkit"; + +export default async function ({ prefix, version, props }: BuildOptions) { + await unarchive(`https://www.openssl.org/source/openssl-${version}.tar.gz`); + + let extra = ""; + if (Deno.build.os == 'windows') { + env_include('nasm.us'); + } else { + // doesn’t build on windows + // supposedly important optimization + extra = "enable-ec_nistp_64_gcc_128"; + // windows is built to be relocatable but other platforms are not + run`patch -p1 --input ${props}/relocatable.diff`; + } + if (Deno.build.os != 'darwin') { + env_include('perl'); + } + + run` + perl + ./Configure + --prefix=${prefix} + ${target(version)} + no-tests + ${extra} + --openssldir=/etc/ssl + `; + + if (Deno.build.os != 'windows') { + run`make --jobs ${navigator.hardwareConcurrency}`; + run`make install_sw`; + } else { + run`nmake`; + run`nmake install_sw`; + } + + // weird choices from openssl here + prefix.join("lib64").isDirectory()?.mv({ to: prefix.lib }); +} + +function target(version: SemVer) { + if (!new Range("^1").satisfies(version)) return ""; + switch (`${Deno.build.os}/${Deno.build.arch}`) { + case "darwin/aarch64": + return "darwin64-arm64-cc"; + case "darwin/x86-64": + return "darwin64-x86_64-cc"; + case "linux/aarch64": + return "linux-aarch64"; + case "linux/x86-64": + return "linux-x86_64"; + default: + throw new Error("unsupported platform"); + } +} diff --git a/projects/openssl.org/package.yml b/projects/openssl.org/package.yml new file mode 100644 index 00000000..ac32d26a --- /dev/null +++ b/projects/openssl.org/package.yml @@ -0,0 +1,17 @@ +name: + OpenSSL + +repository: + https://github.com/openssl/openssl + +linux: + companions: + - curl.se/ca-certs + +programs: + - bin/openssl + - bin/c_rehash + +platforms: + - darwin + - linux diff --git a/projects/openssl.org/relocatable.diff b/projects/openssl.org/relocatable.diff new file mode 100644 index 00000000..9e177a6e --- /dev/null +++ 
b/projects/openssl.org/relocatable.diff @@ -0,0 +1,47 @@ +diff --git i/crypto/x509/x509_def.c w/crypto/x509/x509_def.c +index 7d5b642..f65fa3c 100644 +--- i/crypto/x509/x509_def.c ++++ w/crypto/x509/x509_def.c +@@ -95,13 +95,41 @@ const char *X509_get_default_cert_dir(void) + #endif + } + ++#if !defined (_WIN32) ++static pthread_once_t pkgx_cert_file_once = PTHREAD_ONCE_INIT; ++static const char *pkgx_cert_file = X509_CERT_FILE; ++ ++#ifdef __linux__ ++#define __USE_GNU ++#endif ++#include /* dladdr */ ++#include /* dirname */ ++ ++static void init_pkgx_cert_file(void) { ++ Dl_info info; ++ if (!dladdr(X509_get_default_cert_file, &info)) return; ++ const char *openssl_prefix = dirname(dirname(info.dli_fname)); ++ #define PEM_FILE_PARTIAL_PATH "/share/ca-certs.pem" ++ char *cert_file = malloc(strlen(openssl_prefix) + sizeof(PEM_FILE_PARTIAL_PATH)); ++ if (!cert_file) return; ++ strcpy(cert_file, openssl_prefix); ++ strcat(cert_file, PEM_FILE_PARTIAL_PATH); ++ if (access(cert_file, R_OK) == 0) { ++ pkgx_cert_file = cert_file; ++ } else { ++ free(cert_file); ++ } ++} ++#endif ++ + const char *X509_get_default_cert_file(void) + { + #if defined (_WIN32) + RUN_ONCE(&openssldir_setup_init, do_openssldir_setup); + return x509_cert_fileptr; + #else +- return X509_CERT_FILE; ++ pthread_once(&pkgx_cert_file_once, init_pkgx_cert_file); ++ return pkgx_cert_file; + #endif + } + diff --git a/projects/openssl.org/test.ts b/projects/openssl.org/test.ts new file mode 100644 index 00000000..9ebbb91b --- /dev/null +++ b/projects/openssl.org/test.ts @@ -0,0 +1,23 @@ +import { assertEquals } from "jsr:@std/assert@^1"; +import { run, TestOptions } from "brewkit"; + +export default async function ({ prefix, version }: TestOptions) { + Deno.writeTextFileSync("./in", "This is a test file\n"); + + run`openssl dgst -sha256 -out out ./in`; + + if (version.major >= 3) { + assertEquals( + Deno.readTextFileSync("out"), + "SHA2-256(./in)= c87e2ca771bab6024c269b933389d2a92d4941c848c52f155b9b84e1f109fe35\n", + ); + } else { + assertEquals( + Deno.readTextFileSync("out"), + "SHA256(./in)= c87e2ca771bab6024c269b933389d2a92d4941c848c52f155b9b84e1f109fe35\n", + ); + } + + // test we find and use curl.se/ca-certs + run`openssl s_client -connect example.com:443 -verify_return_error`; +} diff --git a/projects/openssl.org/versions.ts b/projects/openssl.org/versions.ts new file mode 100644 index 00000000..b616e1d2 --- /dev/null +++ b/projects/openssl.org/versions.ts @@ -0,0 +1,26 @@ +import { github, Range, SemVer, semver } from "brewkit"; + +export default async function (constraint: Range) { + const rv = []; + if ( + constraint.toString() != "*" && + semver.intersects(constraint, new Range("^1")) + ) { + const tags = await github.tags("openssl/openssl"); + for (const { name: tag } of tags) { + const match = tag.match(/^OpenSSL_(\d+_\d+_\d+[a-z]?)$/); + if (match) { + const version = new SemVer(match[1].replace(/_/g, ".")); + rv.push({ tag, version }); + } + } + } + if (semver.intersects(constraint, new Range(">=3"))) { + const releases = await github.releases("openssl/openssl", constraint); + for (const { tag_name: tag } of releases) { + const version = new SemVer(tag.replace(/^openssl-/, "")); + rv.push({ tag, version }); + } + } + return rv; +} diff --git a/projects/perl.org/README.md b/projects/perl.org/README.md new file mode 100644 index 00000000..767b8aeb --- /dev/null +++ b/projects/perl.org/README.md @@ -0,0 +1,7 @@ +# Perl + +We have configured Perl so that its site directory is a sensible global system +location: + +* 
macOS: `/Library/Perl/x.y` +* Linux: `/usr/local/lib/site_perl/x.y` diff --git a/projects/perl.org/build.ts b/projects/perl.org/build.ts new file mode 100644 index 00000000..ab6f5b1a --- /dev/null +++ b/projects/perl.org/build.ts @@ -0,0 +1,90 @@ +import { BuildOptions, inreplace, run, unarchive } from "brewkit"; + +export default async function ({ prefix, version }: BuildOptions) { + await unarchive(`https://www.cpan.org/src/${version.major}.0/perl-${version}.tar.xz`); + + if (Deno.build.os == 'windows') { + Deno.chdir("win32"); + run`nmake /M INST_TOP=${prefix} CCTYPE=MSVC143 install`; + prefix.join("html").rm("rf"); + } else { + const extra = Deno.build.os == "linux" ? "-Accflags=-fPIC" : ""; + + run`./Configure + -d + -e + -Dprefix=${prefix} + -Duselargefiles + -Dusethreads + -Duseshrplib=false + -Duserelocatableinc + -DEBUGGING=none + -Dsiteman1dir=/usr/local/share/man/man1 + -Dsiteman3dir=/usr/local/share/man/man3 + ${extra}`; + + run`make --jobs ${navigator.hardwareConcurrency} install`; + + const sitearchexp = await (async () => { + for await (const [path, {name}] of prefix.lib.join(`perl5/${version}`).ls()) { + if (name.includes("thread-multi")) { + return path; + } + } + })(); + + const config_pm = sitearchexp!.join("Config.pm").chmod(0o644); + + // -Duserelocatableinc sets these to install site pkgs in the keg + // but we don’t want that for site pkgs, we just want perl to be + // able to find its system packages in a relocatable way + inreplace( + config_pm, + /sitearchexp => .*/g, + `sitearchexp => '${site_prefix()}/${sitearchexp!.basename()}',`, + ); + inreplace( + config_pm, + /sitelibexp => .*/g, + `sitelibexp => '${site_prefix()}/${version.marketing}',`, + ); + inreplace(config_pm, /scriptdir => .*/g, "scriptdir => '/usr/local/bin',"); + + //FIXME doesn’t work + // cpan installs to ~/.local + // inreplace( + // prefix.join(`lib/${version}/CPAN/FirstTime.pm`).chmod(0o644), + // "return File::Spec->catdir(_local_lib_home(), 'perl5');", + // "return File::Spec->catdir(_local_lib_home(), '.local')" + // ); + + //FIXME doesn’t work + // inreplace( + // prefix.join("lib/5.40.1/CPAN/HandleConfig.pm").chmod(0o644), + // "my $dotcpan = $^O eq 'VMS' ? '_cpan' : '.cpan';", + // "my $dotcpan = $^O eq 'VMS' ? 
'_cpan' : '.local/share/cpan';", + // ) + + // tidy + inreplace(config_pm, /libpth => .*/g, "libpth => '/usr/local/lib /usr/lib',"); + + // rewrite hardcoded prefixes + for await (const [path, { name }] of prefix.bin.ls()) { + if (name == "perl" || name == `perl${version}`) continue; + inreplace(path, `exec ${prefix.bin.join("perl")}`, "exec perl"); + } + + inreplace(prefix.bin.join("perlivp"), /^\s*my \$perlpath = .*;$/g, "my $perlpath = $^X;"); + } +} + +function site_prefix() { + switch (Deno.build.os) { + case "darwin": + return "/Library/Perl"; + case "linux": + return `/usr/local/lib/perl`; + default: + throw new Error(); + } +} diff --git a/projects/perl.org/package.yml b/projects/perl.org/package.yml new file mode 100644 index 00000000..2eb51c18 --- /dev/null +++ b/projects/perl.org/package.yml @@ -0,0 +1,43 @@ +name: Perl + +repository: + https://github.com/perl/perl5 + +programs: + - bin/perl + - bin/corelist + - bin/cpan + - bin/enc2xs + - bin/encguess + - bin/h2ph + - bin/h2xs + - bin/instmodsh + - bin/json_pp + - bin/libnetcfg + - bin/perlbug + - bin/perldoc + - bin/perlivp + - bin/perlthanks + - bin/piconv + - bin/pl2pm + - bin/pod2html + - bin/pod2man + - bin/pod2text + - bin/pod2usage + - bin/podchecker + - bin/prove + - bin/ptar + - bin/ptardiff + - bin/ptargrep + - bin/shasum + - bin/splain + - bin/streamzip + - bin/xsubpp + - bin/zipdetails + +platforms: + - darwin + - linux + +knobs: + - side-by-side diff --git a/projects/perl.org/sandbox.sb b/projects/perl.org/sandbox.sb new file mode 100644 index 00000000..5093946f --- /dev/null +++ b/projects/perl.org/sandbox.sb @@ -0,0 +1,2 @@ +(deny process-exec* (subpath "/usr/bin/perl*")) +(deny file-read* (subpath "/System/Library/Perl/*")) diff --git a/projects/perl.org/test.ts b/projects/perl.org/test.ts new file mode 100644 index 00000000..46ccf68c --- /dev/null +++ b/projects/perl.org/test.ts @@ -0,0 +1,7 @@ +import { run } from "brewkit"; + +export default function () { + Deno.writeTextFileSync("fixture.pl", "print 'Perl is not an acronym, but JAPH is a Perl acronym!'"); + run`perl ./fixture.pl`; + run`shasum --version`; +} diff --git a/projects/perl.org/versions.ts b/projects/perl.org/versions.ts new file mode 100644 index 00000000..07307a4f --- /dev/null +++ b/projects/perl.org/versions.ts @@ -0,0 +1,11 @@ +import { github, Range, SemVer, semver } from "brewkit"; + +export default async function () { + return (await github.tags("perl/perl5")).compact(({ name: tag }) => { + const version = semver.parse(tag); + if (version && version.minor % 2 == 0) { + // ^^ perl odd minor releases are testing releases + return { version, tag }; + } + }); +} diff --git a/projects/python.org/build.ts b/projects/python.org/build.ts new file mode 100644 index 00000000..5c3bb845 --- /dev/null +++ b/projects/python.org/build.ts @@ -0,0 +1,61 @@ +import { BuildOptions, env_include, inreplace, Path, run, SemVer, unarchive } from "brewkit"; + +export default async function ( + { tag, prefix, deps, version, props }: BuildOptions, +) { + tag = tag.replace(/^v/, ""); + await unarchive( + `https://www.python.org/ftp/python/${tag}/Python-${tag}.tar.xz`, + ); + + if (Deno.build.os == "linux") { + // --enable-optimizations requires llvm-profdata + await env_include("llvm.org"); + } + + if (version.lt(new SemVer("3.12"))) { + inreplace( + Path.cwd().join("setup.py"), + /system_lib_dirs = .*/g, + `system_lib_dirs = os.getenv("LIBRARY_PATH").split(":")`, + ); + inreplace( + Path.cwd().join("setup.py"), + /system_include_dirs = .*/g, + `system_include_dirs = 
os.getenv("CPATH").split(":")`, + ); + } + + //NOTE clang required for --enable-optimizations + //TODO --enable-bolt reduces end filesize (requires llvm-bolt) + run`./configure + --prefix=${prefix} + --with-openssl=${deps["openssl.org"].prefix} + --with-system-expat + --with-system-ffi + --with-system-libmpdec + --enable-shared + --enable-optimizations + --with-lto=full + --without-ensurepip + --disable-test-modules + --enable-loadable-sqlite-extensions + --with-configdir=/etc/python + CC=clang + `; + run`make --jobs ${navigator.hardwareConcurrency}`; + run`make install`; + + props.join("sitecustomize.py").cp({ into: prefix.lib.join(`python${version.marketing}`) }); + + // provide unversioned binaries + const v = `${version.major}.${version.minor}`; + prefix.join("bin/python").ln("s", { target: `python${v}` }); + prefix.join("bin/pydoc").ln("s", { target: `pydoc${v}` }); + prefix.join("bin/python-config").ln("s", { target: `python${v}-config` }); + + // idle is prehistoric and nobody wants it + prefix.join(`bin/idle${v}`).rm(); + prefix.join(`bin/idle${version.major}`).rm(); + prefix.join(`lib/python${v}/idlelib`).rm("rf"); +} diff --git a/projects/python.org/package.yml b/projects/python.org/package.yml new file mode 100644 index 00000000..70d6921f --- /dev/null +++ b/projects/python.org/package.yml @@ -0,0 +1,40 @@ +name: + Python + +repository: + https://github.com/python/cpython + +# companions: +# - python.org/pip + +dependencies: + openssl.org: ^3 + tukaani.org/xz: ^5 + +linux: + dependencies: + github.com/libexpat: ^2 + sourceware.org/bzip2: ^1 + gnu.org/readline: ^7,^8 + invisible-island.net/ncurses: ^6 + zlib.net: ^1 + tukaani.org/xz: ^5 + sqlite.org: ^3 + sourceware.org/libffi: ^3 + # TODO + # bytereef.org/mpdecimal: 2 + # tcl-lang.org: 8 +programs: + - bin/python + - bin/python-config + - bin/pydoc +#TODO +#- bin/python{{ version.major }} +#- bin/python{{ version.marketing }} + +platforms: + - darwin + - linux + +knobs: + - side-by-side diff --git a/projects/python.org/pip/build.ts b/projects/python.org/pip/build.ts new file mode 100644 index 00000000..c2a18636 --- /dev/null +++ b/projects/python.org/pip/build.ts @@ -0,0 +1,40 @@ +import { BuildOptions, env_include, Path, run, unarchive, undent } from "brewkit"; + +export default async function ({ prefix, tag, version, deps }: BuildOptions) { + await unarchive(`https://github.com/pypa/pip/archive/refs/tags/${tag}.tar.gz`); + + const rsp = await fetch(`https://bootstrap.pypa.io/get-pip.py`); + using file = await Deno.open("get-pip.py", { write: true, create: true }); + await rsp.body!.pipeTo(file.writable); + + env_include("python.org"); + + run`python get-pip.py --target=bootstrap`; + + Deno.env.set("PYTHONPATH", Path.cwd().join("bootstrap").string); + run`python -m pip install . --target=${prefix}`; + + prefix.join(`pip-${version}.dist-info`).rm("rf"); + + // delete pip3, pip3.13 etc. + for await (const [path, { name }] of prefix.bin.ls()) { + if (name !== "pip") path.rm(); + } + + prefix.join("pip").mv({ into: prefix.join("lib/python").mkdir("p") }); + + const pipfile = prefix.bin.join("pip"); + let contents = await prefix.bin.join("pip").read(); + contents = contents.split("\n").slice(1).join("\n"); + + pipfile.write(undent` + #!/bin/sh + """:" + d="$(cd "$(dirname "$0")/.." 
&& pwd)" + export PIP_DISABLE_PIP_VERSION_CHECK=1 + export PYTHONPATH="$d/lib/python\${PYTHONPATH:+:$PYTHONPATH}" + exec python "$0" "$@" + ":""" + + ${contents}`); +} diff --git a/projects/python.org/pip/package.yml b/projects/python.org/pip/package.yml new file mode 100644 index 00000000..3f5c9cbe --- /dev/null +++ b/projects/python.org/pip/package.yml @@ -0,0 +1,8 @@ +name: + pip + +programs: + - bin/pip + +companions: + - python.org diff --git a/projects/python.org/pip/test.ts b/projects/python.org/pip/test.ts new file mode 100644 index 00000000..7e84c039 --- /dev/null +++ b/projects/python.org/pip/test.ts @@ -0,0 +1,17 @@ +import { backticks, Path, run, TestOptions } from "brewkit"; +import { assertMatch } from "jsr:@std/assert@^1/match"; +import { assertPath } from "https://jsr.io/@std/path/1.0.8/_common/assert_path.ts"; + +export default async function ({ version, prefix }: TestOptions) { + //TODO presumably needs encoding in the package.yml + if (version.major < 19) { + run`pkgx +python.org~3.9 pip install findtui --target .`; + } else { + run`pip install findtui --target foo`; + } + + assertPath(Path.cwd().join("findtui/main.py").string); + + assertMatch(await backticks`pip --version`, new RegExp(prefix.string)); + assertMatch(await backticks`pip --version`, new RegExp(version.toString())); +} diff --git a/projects/python.org/pip/versions.ts b/projects/python.org/pip/versions.ts new file mode 100644 index 00000000..d6f8078d --- /dev/null +++ b/projects/python.org/pip/versions.ts @@ -0,0 +1,12 @@ +import { github, semver } from "brewkit"; +import SemVer from "https://deno.land/x/libpkgx@v0.20.3/src/utils/semver.ts"; + +export default async function () { + return (await github.tags("pypa/pip")).compact(({ name: tag }) => { + const version = semver.parse(tag); + // the major prior to 18 is 10, which is ancient and doesn't currently work + if (version && version.gt(new SemVer("10"))) { + return { version, tag }; + } + }); +} diff --git a/projects/python.org/sandbox.sb b/projects/python.org/sandbox.sb new file mode 100644 index 00000000..4cf59bfe --- /dev/null +++ b/projects/python.org/sandbox.sb @@ -0,0 +1,3 @@ +(deny process-exec* + (literal "/usr/bin/pip3") + (literal "/usr/bin/python3")) diff --git a/projects/python.org/sitecustomize.py b/projects/python.org/sitecustomize.py new file mode 100644 index 00000000..36897aba --- /dev/null +++ b/projects/python.org/sitecustomize.py @@ -0,0 +1,12 @@ +import site +import sys + +# tea considers /usr/local the "global install" location +# setting `sys.prefix` is kinda nuts but brew does the same +# so we're assuming they vetted all options +sys.prefix = "/usr/local" + +# enable automatic user installs if pip makes the decision +# that this is good idea. eg. if /usr/local isn't writable +# this is what it will do +site.ENABLE_USER_SITE = True diff --git a/projects/python.org/test.ts b/projects/python.org/test.ts new file mode 100644 index 00000000..f69daa2a --- /dev/null +++ b/projects/python.org/test.ts @@ -0,0 +1,19 @@ +import { run } from "brewkit"; + +export default async function () { + //FIXME on linux should work but we need to add sqlite to the env first + if (Deno.build.os == "darwin") { + // Check if sqlite is ok, because we build with --enable-loadable-sqlite-extensions + // and it can occur that building sqlite silently fails if OSX's sqlite is used. 
+ run`python -c "import sqlite3"`; + } + + // check to see if we can create a venv + // we can't ensurepip on 3.7 so, this won't work + run`python -m venv myvenv`; + + // Check if some other modules import. Then the linked libs are working. + run`python -c "import zlib"`; + run`python -c "import pyexpat"`; + run`python -v -c "import _ctypes"`; +} diff --git a/projects/python.org/versions.ts b/projects/python.org/versions.ts new file mode 100644 index 00000000..5b1cae1c --- /dev/null +++ b/projects/python.org/versions.ts @@ -0,0 +1,10 @@ +import { github, semver } from "brewkit"; + +export default async function () { + return (await github.tags("python/cpython")).compact(({ name: tag }) => { + const version = semver.parse(tag); + if (version) { + return { version, tag }; + } + }); +} diff --git a/projects/pyyaml.org/libyaml/build.ts b/projects/pyyaml.org/libyaml/build.ts new file mode 100644 index 00000000..42ece63c --- /dev/null +++ b/projects/pyyaml.org/libyaml/build.ts @@ -0,0 +1,8 @@ +import { BuildOptions, run, unarchive } from "brewkit"; + +export default async function ({ prefix, version }: BuildOptions) { + await unarchive(`https://pyyaml.org/download/libyaml/yaml-${version}.tar.gz`); + + run`./configure --prefix=${prefix} --disable-debug`; + run`make --jobs ${navigator.hardwareConcurrency} install`; +} diff --git a/projects/pyyaml.org/libyaml/package.yml b/projects/pyyaml.org/libyaml/package.yml new file mode 100644 index 00000000..54954eea --- /dev/null +++ b/projects/pyyaml.org/libyaml/package.yml @@ -0,0 +1,9 @@ +name: + PyYAML + +repository: + https://github.com/yaml/libyaml + +platforms: + - darwin + - linux diff --git a/projects/pyyaml.org/libyaml/test.c b/projects/pyyaml.org/libyaml/test.c new file mode 100644 index 00000000..cb968879 --- /dev/null +++ b/projects/pyyaml.org/libyaml/test.c @@ -0,0 +1,8 @@ + #include + +int main() { + yaml_parser_t parser; + yaml_parser_initialize(&parser); + yaml_parser_delete(&parser); + return 0; +} diff --git a/projects/pyyaml.org/libyaml/test.ts b/projects/pyyaml.org/libyaml/test.ts new file mode 100644 index 00000000..6b11ace3 --- /dev/null +++ b/projects/pyyaml.org/libyaml/test.ts @@ -0,0 +1,6 @@ +import { run } from "brewkit"; + +export default function () { + run`cc test.c -lyaml`; + run`./a.out`; +} diff --git a/projects/ruby-lang.org/build.ts b/projects/ruby-lang.org/build.ts new file mode 100644 index 00000000..ee1dec92 --- /dev/null +++ b/projects/ruby-lang.org/build.ts @@ -0,0 +1,34 @@ +import { BuildOptions, inreplace, run, stub, unarchive } from "brewkit"; +import env_include from "../../brewkit/env-include.ts"; + +export default async function ({ prefix, version }: BuildOptions) { + await unarchive(`https://cache.ruby-lang.org/pub/ruby/${version.marketing}/ruby-${version}.tar.gz`); + + env_include("rust"); + + run`./configure + --prefix=${prefix} + --disable-debug + --enable-load-relative # makes us relocatable + --without-gmp + --enable-shared + --with-sitedir=/Library/Ruby/Site + --with-vendordir=/Library/Ruby/Gems + --disable-install-doc + --disable-install-rdoc + --disable-install-capi + --disable-install-static-library + --with-rubyarchprefix=${prefix}/lib/ruby # no need for architecture specific crap + --with-rubyhdrdir=${prefix}/include # ^^ + --with-rubyarchhdrdir=${prefix}/include # ^^ + --with-vendordir=no # is empty so don’t pollute + --with-vendorarchdir=no # ^^ + --with-sitearchdir=no # ^^ + --enable-yjit # https://github.com/pkgxdev/pantry/issues/3538 + `; + + // prevent install failure due to no ability to 
create these dirs + inreplace("./tool/rbinstall.rb", /prepare "extension scripts", (site|vendor)(arch)?libdir/g, ""); + + run`make --jobs ${navigator.hardwareConcurrency} install`; +} diff --git a/projects/ruby-lang.org/dev.ts b/projects/ruby-lang.org/dev.ts new file mode 100644 index 00000000..578f92ed --- /dev/null +++ b/projects/ruby-lang.org/dev.ts @@ -0,0 +1,5 @@ +import { Path } from "brewkit"; + +export default function (cwd: Path) { + return cwd.join(".rubyversion").read(); +} diff --git a/projects/ruby-lang.org/package.yml b/projects/ruby-lang.org/package.yml new file mode 100644 index 00000000..3e3746e3 --- /dev/null +++ b/projects/ruby-lang.org/package.yml @@ -0,0 +1,26 @@ +name: Ruby + +repository: + https://github.com/ruby/ruby + +dependencies: + openssl.org: ^3 + pyyaml.org/libyaml: ^0.2.5 + +dev: + - .rubyversion + +programs: + - bin/ruby + - bin/erb + - bin/irb + - bin/rake + - bin/rdoc + - bin/ri + - bin/bundle + - bin/bundler + - bin/gem + +platforms: + - darwin + - linux diff --git a/projects/ruby-lang.org/test.ts b/projects/ruby-lang.org/test.ts new file mode 100644 index 00000000..a234a0d0 --- /dev/null +++ b/projects/ruby-lang.org/test.ts @@ -0,0 +1,41 @@ +import { backticks, run, undent } from "brewkit"; +import { assertFalse } from "jsr:@std/assert@^1/false"; + +export default async function () { + run`ruby -e 'puts "Hello World!"'`; + + // test bundled ruby libraries can be used + Deno.writeTextFileSync( + "test.rb", + undent` + require 'date' + puts Date.today`, + ); + run`ruby ./test.rb`; + + // tests the gems that come with Ruby can be utilized + Deno.writeTextFileSync( + "test.rb", + undent` + require 'matrix' + matrix = Matrix[[1, 2], [3, 4]] + transpose_matrix = matrix.transpose + determinant = matrix.determinant + puts "Original matrix:\n#{matrix}" + puts "Transposed matrix:\n#{transpose_matrix}" + puts "Determinant of the matrix: #{determinant}"`, + ); + run`ruby ./test.rb`; + + Deno.writeTextFileSync( + "test.rb", + undent` + def fib(n) + return n if n <= 1 + fib(n-1) + fib(n-2) + end + puts fib(35)`, + ); + const out = await backticks`ruby --yjit ./test.rb`; + assertFalse(out.includes("warning")); +} diff --git a/projects/ruby-lang.org/versions.ts b/projects/ruby-lang.org/versions.ts new file mode 100644 index 00000000..079afb3e --- /dev/null +++ b/projects/ruby-lang.org/versions.ts @@ -0,0 +1,10 @@ +import { github, semver } from "brewkit"; + +export default async function () { + return (await github.tags("ruby/ruby")).compact( + ({ name: tag }) => { + const version = semver.parse(tag.replaceAll("_", ".")); + return version && { tag, version }; + }, + ); +} diff --git a/projects/rust-lang.org/build.ts b/projects/rust-lang.org/build.ts new file mode 100644 index 00000000..679d18eb --- /dev/null +++ b/projects/rust-lang.org/build.ts @@ -0,0 +1,67 @@ +import { BuildOptions, env_include, Path, run, unarchive } from "brewkit"; +import { walk, WalkOptions } from "jsr:@std/fs@1/walk"; + +export default async function ({ prefix, version }: BuildOptions) { + await unarchive(`https://static.rust-lang.org/dist/rust-${version}-${Deno.build.target}.tar.xz`); + + const opts: WalkOptions = { + maxDepth: 2, + includeDirs: true, + includeFiles: false, + includeSymlinks: false, + }; + + for await (const { name, ...entry } of walk(".", opts)) { + const path = Path.cwd().join(entry.path); + switch (name) { + case "bin": + case "etc": + case "lib": + case "libexec": + case "share": + await merge(path, prefix.join(name).mkdir("p")); + } + } + + prefix.bin.join("rls").rm(); // 
deprecated + prefix.lib.join("rustlib/aarch64-apple-darwin/analysis").rm("rf"); // just a weird json file + + for await (const [path, { name }] of prefix.lib.join(`rustlib/${Deno.build.target}/bin`).ls()) { + if (name !== "rust-objcopy") { + path.rm("rf"); + } + } +} + +async function merge(src: Path, dest: Path) { + for await (const entry of Deno.readDir(src.string)) { + const srcPath = src.join(entry.name); + const destPath = dest.join(entry.name); + + if (entry.isFile) { + srcPath.mv({ to: destPath }); + } else if (entry.isDirectory) { + await merge(srcPath, destPath.mkdir()); + } + } +} + +export async function build_from_source({ prefix, version }: BuildOptions) { + await unarchive(`https://static.rust-lang.org/dist/rustc-${version}-src.tar.gz`); + + env_include("cmake ninja python~3.9 openssl.org^3"); + + if (Deno.build.os == "darwin") { + // fix machos needs this + Deno.env.set("RUSTFLAGS", "-C link-args=-headerpad_max_install_names"); + } + + run`./configure + --enable-vendor + --prefix=${prefix} + --enable-ninja + --disable-docs # docs are online + --tools=clippy,rustdoc,rustfmt,analysis + --sysconfdir=/etc`; + run`make install`; +} diff --git a/projects/rust-lang.org/package.yml b/projects/rust-lang.org/package.yml new file mode 100644 index 00000000..70005e9f --- /dev/null +++ b/projects/rust-lang.org/package.yml @@ -0,0 +1,29 @@ +name: + Rust + +repository: + https://github.com/rust-lang/rust + +programs: + - bin/rustc + - bin/cargo + - bin/cargo-clippy + - bin/cargo-fmt + - bin/clippy-driver + - bin/rls + - bin/rust-analyzer + - bin/rust-gdb + - bin/rust-gdbgui + - bin/rust-lldb + - bin/rustdoc + - bin/rustfmt + +linux: + dependencies: + zlib.net: ^1 + companions: + - llvm.org + +platforms: + - darwin + - linux diff --git a/projects/rust-lang.org/test.ts b/projects/rust-lang.org/test.ts new file mode 100644 index 00000000..face931f --- /dev/null +++ b/projects/rust-lang.org/test.ts @@ -0,0 +1,12 @@ +import { run, undent } from "brewkit"; + +export default function () { + Deno.writeTextFileSync( + "test.rs", + undent` + fn main() { + println!("Hello World!"); + }`, + ); + run`rustc test.rs -o hello --crate-name hello`; +} diff --git a/projects/sourceware.org/bzip2/Makefile-libbz2_dylib b/projects/sourceware.org/bzip2/Makefile-libbz2_dylib new file mode 100644 index 00000000..13d08d9b --- /dev/null +++ b/projects/sourceware.org/bzip2/Makefile-libbz2_dylib @@ -0,0 +1,44 @@ +# This Makefile builds a shared version of the library, +# libbz2.dylib for MacOSX x86 (10.13.4 or higher), +# It is a custom Makefile. Use at own risk. 
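+# (build.ts selects this Makefile on darwin; Linux builds use the upstream Makefile-libbz2_so)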
+# Run in your MacOS terminal with the following command: +# make -f Makefile-libbz2_dylib + +PKG_VERSION?=1.0.8 +PREFIX?=/usr/local + +# SHELL=/bin/sh +# CC=gcc +BIGFILES=-D_FILE_OFFSET_BITS=64 +CFLAGS=-fpic -fPIC -Wall -Winline -O2 -g $(BIGFILES) + +OBJS= blocksort.o \ + huffman.o \ + crctable.o \ + randtable.o \ + compress.o \ + decompress.o \ + bzlib.o + +all: $(OBJS) + $(CC) -shared -Wl,-install_name -Wl,libbz2.dylib -o libbz2.${PKG_VERSION}.dylib $(OBJS) + cp libbz2.${PKG_VERSION}.dylib ${PREFIX}/lib/ + ln -s libbz2.${PKG_VERSION}.dylib ${PREFIX}/lib/libbz2.dylib + +clean: + rm -f libbz2.dylib libbz2.${PKG_VERSION}.dylib + +blocksort.o: blocksort.c + $(CC) $(CFLAGS) -c blocksort.c +huffman.o: huffman.c + $(CC) $(CFLAGS) -c huffman.c +crctable.o: crctable.c + $(CC) $(CFLAGS) -c crctable.c +randtable.o: randtable.c + $(CC) $(CFLAGS) -c randtable.c +compress.o: compress.c + $(CC) $(CFLAGS) -c compress.c +decompress.o: decompress.c + $(CC) $(CFLAGS) -c decompress.c +bzlib.o: bzlib.c + $(CC) $(CFLAGS) -c bzlib.c diff --git a/projects/sourceware.org/bzip2/build.ts b/projects/sourceware.org/bzip2/build.ts new file mode 100644 index 00000000..2c6cf1e4 --- /dev/null +++ b/projects/sourceware.org/bzip2/build.ts @@ -0,0 +1,23 @@ +import { BuildOptions, flatmap, Path, run, unarchive } from "brewkit"; +import { expandGlob } from "jsr:@std/fs@1/expand-glob"; + +export default async function build({ prefix, version, props }: BuildOptions) { + await unarchive(`https://sourceware.org/pub/bzip2/bzip2-${version}.tar.gz`); + + run`make PREFIX=${prefix}`; + run`make PREFIX=${prefix} install`; + flatmap(lib(), (lib) => run`make PREFIX=${prefix} --file ${lib}`); + + for await (const { path } of expandGlob("*.so.*")) { + Path.cwd().join(path).mv({ into: prefix.join("lib") }); + } + + function lib() { + switch (Deno.build.os) { + case "linux": + return "Makefile-libbz2_so"; + case "darwin": + return props.join("Makefile-libbz2_dylib").string; + } + } +} diff --git a/projects/sourceware.org/bzip2/package.yml b/projects/sourceware.org/bzip2/package.yml new file mode 100644 index 00000000..9021324b --- /dev/null +++ b/projects/sourceware.org/bzip2/package.yml @@ -0,0 +1,22 @@ +name: + Bzip2 + +repository: + https://gitlab.com/federicomenaquintero/bzip2 + +programs: + - bin/bunzip2 + - bin/bzcat + - bin/bzcmp + - bin/bzdiff + - bin/bzgrep + - bin/bzegrep + - bin/bzfgrep + - bin/bzip2 + - bin/bzip2recover + - bin/bzmore + - bin/bzless + +platforms: + - darwin + - linux diff --git a/projects/sourceware.org/bzip2/test.ts b/projects/sourceware.org/bzip2/test.ts new file mode 100644 index 00000000..0ad677b8 --- /dev/null +++ b/projects/sourceware.org/bzip2/test.ts @@ -0,0 +1,37 @@ +import { assertEquals } from "jsr:@std/assert@^1"; +import { Path, run } from "brewkit"; + +function nonce(length = 32): string { + const base62 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + return Array.from(crypto.getRandomValues(new Uint8Array(length))) + .map((n) => base62[n % 62]) // Map random bytes to Base62 characters + .join(""); +} + +export default async function () { + const sample = nonce(); + + const compress = new Deno.Command("bzip2", { + stdin: "piped", + stdout: "piped", + }).spawn(); + const decompress = new Deno.Command("bunzip2", { + args: ["-d"], + stdin: "piped", + stdout: "piped", + }).spawn(); + + compress.stdout.pipeTo(decompress.stdin); + + const writer = await compress.stdin.getWriter(); + writer.write(new TextEncoder().encode(sample)); + writer.close(); + + const output = new 
TextDecoder().decode((await decompress.output()).stdout); + + assertEquals(output, sample); + + const f = Path.cwd().join("file").write(`hi\n${sample}\nhi`); + run`bzip2 ${f}`; + run`bzegrep ${sample} ./file.bz2`; +} diff --git a/projects/sourceware.org/bzip2/versions.ts b/projects/sourceware.org/bzip2/versions.ts new file mode 100644 index 00000000..666d760b --- /dev/null +++ b/projects/sourceware.org/bzip2/versions.ts @@ -0,0 +1,11 @@ +import { github, semver } from "brewkit"; + +export default async function () { + return (await github.tags("libarchive/bzip2")).compact( + ({ name: tag }) => { + tag = tag.replace(/^bzip2-/, ""); + const version = semver.parse(tag); + return version && { tag, version }; + }, + ); +} diff --git a/projects/sourceware.org/libffi/build.ts b/projects/sourceware.org/libffi/build.ts new file mode 100644 index 00000000..68184c79 --- /dev/null +++ b/projects/sourceware.org/libffi/build.ts @@ -0,0 +1,8 @@ +import { BuildOptions, run, unarchive } from "brewkit"; + +export default async function build({ prefix, version, tag }: BuildOptions) { + await unarchive(`https://github.com/libffi/libffi/releases/download/${tag}/libffi-${version}.tar.gz`); + + run`./configure --prefix=${prefix} --disable-debug`; + run`make --jobs ${navigator.hardwareConcurrency} install`; +} diff --git a/projects/sourceware.org/libffi/package.yml b/projects/sourceware.org/libffi/package.yml new file mode 100644 index 00000000..58c79dd8 --- /dev/null +++ b/projects/sourceware.org/libffi/package.yml @@ -0,0 +1,9 @@ +name: + libffi + +repository: + https://github.com/libffi/libffi + +platforms: + - darwin + - linux diff --git a/projects/sourceware.org/libffi/test.c b/projects/sourceware.org/libffi/test.c new file mode 100644 index 00000000..aeadb33f --- /dev/null +++ b/projects/sourceware.org/libffi/test.c @@ -0,0 +1,38 @@ +#include <stdio.h> +#include <ffi.h> +/* Acts like puts with the file given at time of enclosure. 
+void puts_binding(ffi_cif *cif, unsigned int *ret, void* args[],
+                  FILE *stream)
+{
+  *ret = fputs(*(char **)args[0], stream);
+}
+int main()
+{
+  ffi_cif cif;
+  ffi_type *args[1];
+  ffi_closure *closure;
+  int (*bound_puts)(char *);
+  int rc;
+  /* Allocate closure and bound_puts */
+  closure = ffi_closure_alloc(sizeof(ffi_closure), &bound_puts);
+  if (closure)
+  {
+    /* Initialize the argument info vectors */
+    args[0] = &ffi_type_pointer;
+    /* Initialize the cif */
+    if (ffi_prep_cif(&cif, FFI_DEFAULT_ABI, 1,
+                     &ffi_type_uint, args) == FFI_OK)
+    {
+      /* Initialize the closure, setting stream to stdout */
+      if (ffi_prep_closure_loc(closure, &cif, puts_binding,
+                               stdout, bound_puts) == FFI_OK)
+      {
+        rc = bound_puts("Hello World!");
+        /* rc now holds the result of the call to fputs */
+      }
+    }
+  }
+  /* Deallocate both closure, and bound_puts */
+  ffi_closure_free(closure);
+  return 0;
+}
\ No newline at end of file
diff --git a/projects/sourceware.org/libffi/test.ts b/projects/sourceware.org/libffi/test.ts
new file mode 100644
index 00000000..203118f0
--- /dev/null
+++ b/projects/sourceware.org/libffi/test.ts
@@ -0,0 +1,7 @@
+import { backticks, run } from "brewkit";
+
+export default async function () {
+  const libs = await backticks`pkg-config --libs libffi`;
+  run`cc -o closure test.c ${libs} -Wno-incompatible-pointer-types`;
+  run`./closure`;
+}
diff --git a/projects/sqlite.org/build.ts b/projects/sqlite.org/build.ts
new file mode 100644
index 00000000..87ad20d9
--- /dev/null
+++ b/projects/sqlite.org/build.ts
@@ -0,0 +1,44 @@
+import { BuildOptions, run, unarchive } from "brewkit";
+
+export default async function ({ prefix, tag, year, deps }: BuildOptions & { year: string }) {
+  await unarchive(`https://sqlite.org/${year}/sqlite-autoconf-${tag}.tar.gz`);
+
+  // copied from brew (without understanding why)
+  let cppflags = `
+    -DSQLITE_ENABLE_API_ARMOR=1
+    -DSQLITE_ENABLE_COLUMN_METADATA=1
+    -DSQLITE_ENABLE_DBSTAT_VTAB=1
+    -DSQLITE_ENABLE_FTS3=1
+    -DSQLITE_ENABLE_FTS3_PARENTHESIS=1
+    -DSQLITE_ENABLE_FTS5=1
+    -DSQLITE_ENABLE_JSON1=1
+    -DSQLITE_ENABLE_MEMORY_MANAGEMENT=1
+    -DSQLITE_ENABLE_RTREE=1
+    -DSQLITE_ENABLE_STAT4=1
+    -DSQLITE_ENABLE_UNLOCK_NOTIFY=1
+    -DSQLITE_USE_URI=1
+  `;
+  // default value of MAX_VARIABLE_NUMBER is 999 which is too low for many
+  // applications. Set to 250000 (Same value used in Debian and Ubuntu).
+  cppflags += `-DSQLITE_MAX_VARIABLE_NUMBER=250000`;
+  Deno.env.set("CPPFLAGS", cppflags);
+
+  let extra = "";
+  if (Deno.build.os == "linux") {
+    extra = `
+      --enable-readline
+      --disable-editline
+      "--with-readline-ldflags=-L${deps["gnu.org/readline"].prefix}/lib -L${
+        deps["invisible-island.net/ncurses"].prefix
+      }/lib -lncursesw -lreadline -ltinfow"
+    `;
+    extra += ` --with-readline-cflags=-I${deps["gnu.org/readline"].prefix}/include`;
+  }
+
+  run`./configure
+    --prefix=${prefix}
+    --enable-session
+    ${extra}
+  `;
+  run`make --jobs ${navigator.hardwareConcurrency} install`;
+}
diff --git a/projects/sqlite.org/package.yml b/projects/sqlite.org/package.yml
new file mode 100644
index 00000000..4a85e89f
--- /dev/null
+++ b/projects/sqlite.org/package.yml
@@ -0,0 +1,17 @@
+name:
+  SQLite
+
+repository:
+  https://github.com/sqlite/sqlite
+
+linux:
+  dependencies:
+    zlib.net: ^1
+    gnu.org/readline: ^8
+
+programs:
+  - bin/sqlite3
+
+platforms:
+  - darwin
+  - linux
diff --git a/projects/sqlite.org/test.ts b/projects/sqlite.org/test.ts
new file mode 100644
index 00000000..e8e57f99
--- /dev/null
+++ b/projects/sqlite.org/test.ts
@@ -0,0 +1,5 @@
+import { run } from "brewkit";
+
+export default async function () {
+  run`sqlite3 --version`;
+}
diff --git a/projects/sqlite.org/versions.ts b/projects/sqlite.org/versions.ts
new file mode 100644
index 00000000..0e27d660
--- /dev/null
+++ b/projects/sqlite.org/versions.ts
@@ -0,0 +1,21 @@
+import { github, SemVer, semver } from "brewkit";
+
+export default async function () {
+  const rsp = await fetch(`https://www.sqlite.org/chronology.html`);
+  const txt = await rsp.text();
+  // each row of chronology.html holds a release date and a version (markup elided):
+  //   2025-02-18
+  //   3.49.1
+  //
+  const years = txt.matchAll(/(\d\d\d\d)-\d\d-\d\d/g)!.map(([, year]) => year).toArray();
+  const tags = txt.matchAll(/data-sortkey='(\d\d\d\d\d\d\d)'/g)!.map((m) => m[1]).toArray();
+  const versions = txt.matchAll(/>(\d+\.\d+\.\d+)<\/a>/g)!.map((m) => m[1]).toArray();
+  // zip them all together
+  return versions.map((version, i) => {
+    return {
+      version: new SemVer(version),
+      year: years[i],
+      tag: tags[i],
+    };
+  });
+}
diff --git a/projects/tukaani.org/xz/build.ts b/projects/tukaani.org/xz/build.ts
new file mode 100644
index 00000000..5fa21758
--- /dev/null
+++ b/projects/tukaani.org/xz/build.ts
@@ -0,0 +1,10 @@
+import { BuildOptions, run, unarchive } from "brewkit";
+
+export default async function ({ prefix, tag, version }: BuildOptions) {
+  await unarchive(
+    `https://github.com/tukaani-project/xz/releases/download/${tag}/xz-${version}.tar.gz`,
+  );
+
+  run`./configure --prefix=${prefix} --disable-debug --disable-doc`;
+  run`make --jobs ${navigator.hardwareConcurrency} install`;
+}
diff --git a/projects/tukaani.org/xz/package.yml b/projects/tukaani.org/xz/package.yml
new file mode 100644
index 00000000..f8244429
--- /dev/null
+++ b/projects/tukaani.org/xz/package.yml
@@ -0,0 +1,37 @@
+name:
+  XZ Utils
+
+repository:
+  https://github.com/tukaani-project/xz
+
+homepage:
+  https://tukaani.org
+
+programs:
+  - bin/lzcat
+  - bin/lzcmp
+  - bin/lzdiff
+  - bin/lzegrep
+  - bin/lzfgrep
+  - bin/lzgrep
+  - bin/lzless
+  - bin/lzma
+  - bin/lzmadec
+  - bin/lzmainfo
+  - bin/lzmore
+  - bin/unlzma
+  - bin/unxz
+  - bin/xz
+  - bin/xzcat
+  - bin/xzcmp
+  - bin/xzdec
+  - bin/xzdiff
+  - bin/xzegrep
+  - bin/xzfgrep
+  - bin/xzgrep
+  - bin/xzless
+  - bin/xzmore
+
+platforms:
+  - darwin
+  - linux
diff --git a/projects/tukaani.org/xz/test.ts b/projects/tukaani.org/xz/test.ts
new file mode 100644
index 00000000..12d6181b
--- /dev/null
+++ b/projects/tukaani.org/xz/test.ts
@@ -0,0 +1,12 @@
+import { backticks, TestOptions, undent } from "brewkit";
+import { assertEquals } from "jsr:@std/assert@^1";
+
+export default async function ({ version }: TestOptions) {
+  assertEquals(
+    await backticks`xz --version`,
+    undent`
+      xz (XZ Utils) ${version}
+      liblzma ${version}
+    `.trim(),
+  );
+}
diff --git a/projects/zlib.net/build.ts b/projects/zlib.net/build.ts
new file mode 100644
index 00000000..632d7187
--- /dev/null
+++ b/projects/zlib.net/build.ts
@@ -0,0 +1,19 @@
+import { BuildOptions, run, unarchive } from "brewkit";
+
+export default async function ({ prefix, tag }: BuildOptions) {
+  await unarchive(`https://zlib.net/zlib-${tag.slice(1)}.tar.gz`);
+
+  if (Deno.build.os == "linux") {
+    // undefined symbol errors in newer llvms prevent building shared libs
+    Deno.env.set("CFLAGS", "-Wl,--undefined-version");
+  }
+
+  if (Deno.build.os == "windows") {
+    run`cmake -B build -DCMAKE_INSTALL_PREFIX=${prefix}`;
+    run`cmake --build build --config Release`;
+    run`cmake --install build`;
+  } else {
+    run`./configure --prefix=${prefix} --enable-shared`;
+    run`make --jobs ${navigator.hardwareConcurrency} install`;
+  }
+}
diff --git a/projects/zlib.net/package.yml b/projects/zlib.net/package.yml
new file mode 100644
index 00000000..ea9dbfb8
--- /dev/null
+++ b/projects/zlib.net/package.yml
@@ -0,0 +1,9 @@
+name:
+  zlib
+
+repository:
+  https://github.com/madler/zlib
+
+platforms:
+  - darwin
+  - linux
diff --git a/projects/zlib.net/test.c b/projects/zlib.net/test.c
new file mode 100644
index 00000000..77f615b8
--- /dev/null
+++ b/projects/zlib.net/test.c
@@ -0,0 +1,207 @@
+// origin: https://zlib.net/zpipe.c
+
+/* zpipe.c: example of proper use of zlib's inflate() and deflate()
+   Not copyrighted -- provided to the public domain
+   Version 1.4  11 December 2005  Mark Adler */
+
+/* Version history:
+   1.0  30 Oct 2004  First version
+   1.1   8 Nov 2004  Add void casting for unused return values
+                     Use switch statement for inflate() return values
+   1.2   9 Nov 2004  Add assertions to document zlib guarantees
+   1.3   6 Apr 2005  Remove incorrect assertion in inf()
+   1.4  11 Dec 2005  Add hack to avoid MSDOS end-of-line conversions
+                     Avoid some compiler warnings for input and output buffers
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <assert.h>
+#include "zlib.h"
+
+#if defined(MSDOS) || defined(OS2) || defined(WIN32) || defined(__CYGWIN__)
+#  include <fcntl.h>
+#  include <io.h>
+#  define SET_BINARY_MODE(file) setmode(fileno(file), O_BINARY)
+#else
+#  define SET_BINARY_MODE(file)
+#endif
+
+#define CHUNK 16384
+
+/* Compress from file source to file dest until EOF on source.
+   def() returns Z_OK on success, Z_MEM_ERROR if memory could not be
+   allocated for processing, Z_STREAM_ERROR if an invalid compression
+   level is supplied, Z_VERSION_ERROR if the version of zlib.h and the
+   version of the library linked do not match, or Z_ERRNO if there is
+   an error reading or writing the files. */
+int def(FILE *source, FILE *dest, int level)
+{
+    int ret, flush;
+    unsigned have;
+    z_stream strm;
+    unsigned char in[CHUNK];
+    unsigned char out[CHUNK];
+
+    /* allocate deflate state */
+    strm.zalloc = Z_NULL;
+    strm.zfree = Z_NULL;
+    strm.opaque = Z_NULL;
+    ret = deflateInit(&strm, level);
+    if (ret != Z_OK)
+        return ret;
+
+    /* compress until end of file */
+    do {
+        strm.avail_in = fread(in, 1, CHUNK, source);
+        if (ferror(source)) {
+            (void)deflateEnd(&strm);
+            return Z_ERRNO;
+        }
+        flush = feof(source) ? Z_FINISH : Z_NO_FLUSH;
+        strm.next_in = in;
+
+        /* run deflate() on input until output buffer not full, finish
+           compression if all of source has been read in */
+        do {
+            strm.avail_out = CHUNK;
+            strm.next_out = out;
+            ret = deflate(&strm, flush);    /* no bad return value */
+            assert(ret != Z_STREAM_ERROR);  /* state not clobbered */
+            have = CHUNK - strm.avail_out;
+            if (fwrite(out, 1, have, dest) != have || ferror(dest)) {
+                (void)deflateEnd(&strm);
+                return Z_ERRNO;
+            }
+        } while (strm.avail_out == 0);
+        assert(strm.avail_in == 0);     /* all input will be used */
+
+        /* done when last data in file processed */
+    } while (flush != Z_FINISH);
+    assert(ret == Z_STREAM_END);        /* stream will be complete */
+
+    /* clean up and return */
+    (void)deflateEnd(&strm);
+    return Z_OK;
+}
+
+/* Decompress from file source to file dest until stream ends or EOF.
+   inf() returns Z_OK on success, Z_MEM_ERROR if memory could not be
+   allocated for processing, Z_DATA_ERROR if the deflate data is
+   invalid or incomplete, Z_VERSION_ERROR if the version of zlib.h and
+   the version of the library linked do not match, or Z_ERRNO if there
+   is an error reading or writing the files. */
+int inf(FILE *source, FILE *dest)
+{
+    int ret;
+    unsigned have;
+    z_stream strm;
+    unsigned char in[CHUNK];
+    unsigned char out[CHUNK];
+
+    /* allocate inflate state */
+    strm.zalloc = Z_NULL;
+    strm.zfree = Z_NULL;
+    strm.opaque = Z_NULL;
+    strm.avail_in = 0;
+    strm.next_in = Z_NULL;
+    ret = inflateInit(&strm);
+    if (ret != Z_OK)
+        return ret;
+
+    /* decompress until deflate stream ends or end of file */
+    do {
+        strm.avail_in = fread(in, 1, CHUNK, source);
+        if (ferror(source)) {
+            (void)inflateEnd(&strm);
+            return Z_ERRNO;
+        }
+        if (strm.avail_in == 0)
+            break;
+        strm.next_in = in;
+
+        /* run inflate() on input until output buffer not full */
+        do {
+            strm.avail_out = CHUNK;
+            strm.next_out = out;
+            ret = inflate(&strm, Z_NO_FLUSH);
+            assert(ret != Z_STREAM_ERROR);  /* state not clobbered */
+            switch (ret) {
+            case Z_NEED_DICT:
+                ret = Z_DATA_ERROR;     /* and fall through */
+            case Z_DATA_ERROR:
+            case Z_MEM_ERROR:
+                (void)inflateEnd(&strm);
+                return ret;
+            }
+            have = CHUNK - strm.avail_out;
+            if (fwrite(out, 1, have, dest) != have || ferror(dest)) {
+                (void)inflateEnd(&strm);
+                return Z_ERRNO;
+            }
+        } while (strm.avail_out == 0);
+
+        /* done when inflate() says it's done */
+    } while (ret != Z_STREAM_END);
+
+    /* clean up and return */
+    (void)inflateEnd(&strm);
+    return ret == Z_STREAM_END ? Z_OK : Z_DATA_ERROR;
+}
+
+/* report a zlib or i/o error */
+void zerr(int ret)
+{
+    fputs("zpipe: ", stderr);
+    switch (ret) {
+    case Z_ERRNO:
+        if (ferror(stdin))
+            fputs("error reading stdin\n", stderr);
+        if (ferror(stdout))
+            fputs("error writing stdout\n", stderr);
+        break;
+    case Z_STREAM_ERROR:
+        fputs("invalid compression level\n", stderr);
+        break;
+    case Z_DATA_ERROR:
+        fputs("invalid or incomplete deflate data\n", stderr);
+        break;
+    case Z_MEM_ERROR:
+        fputs("out of memory\n", stderr);
+        break;
+    case Z_VERSION_ERROR:
+        fputs("zlib version mismatch!\n", stderr);
+    }
+}
+
+/* compress or decompress from stdin to stdout */
+int main(int argc, char **argv)
+{
+    int ret;
+
+    /* avoid end-of-line conversions */
+    SET_BINARY_MODE(stdin);
+    SET_BINARY_MODE(stdout);
+
+    /* do compression if no arguments */
+    if (argc == 1) {
+        ret = def(stdin, stdout, Z_DEFAULT_COMPRESSION);
+        if (ret != Z_OK)
+            zerr(ret);
+        return ret;
+    }
+
+    /* do decompression if -d specified */
+    else if (argc == 2 && strcmp(argv[1], "-d") == 0) {
+        ret = inf(stdin, stdout);
+        if (ret != Z_OK)
+            zerr(ret);
+        return ret;
+    }
+
+    /* otherwise, report usage */
+    else {
+        fputs("zpipe usage: zpipe [-d] < source > dest\n", stderr);
+        return 1;
+    }
+}
diff --git a/projects/zlib.net/test.ts b/projects/zlib.net/test.ts
new file mode 100644
index 00000000..032b28bc
--- /dev/null
+++ b/projects/zlib.net/test.ts
@@ -0,0 +1,28 @@
+import { assertEquals } from "jsr:@std/assert@^1";
+import { run } from "brewkit";
+
+export default async function () {
+  run`cc test.c -lz`;
+
+  const sample = "Hello, World!"; //TODO random
+
+  const compress = new Deno.Command("./a.out", {
+    stdin: "piped",
+    stdout: "piped",
+  }).spawn();
+  const decompress = new Deno.Command("./a.out", {
+    args: ["-d"],
+    stdin: "piped",
+    stdout: "piped",
+  }).spawn();
+
+  compress.stdout.pipeTo(decompress.stdin);
+
+  const writer = await compress.stdin.getWriter();
+  writer.write(new TextEncoder().encode(sample));
+  writer.close();
+
+  const output = new TextDecoder().decode((await decompress.output()).stdout);
+
+  assertEquals(output, sample);
+}