
Merge branch 'main' into HEAD

Zhaofeng Li 2024-10-05 11:50:16 -06:00
commit 5d6560e409
45 changed files with 1612 additions and 540 deletions

.ci/.gitignore vendored Normal file

@@ -0,0 +1 @@
/cached-shell

.ci/build-and-push-images.sh Executable file

@@ -0,0 +1,60 @@
#!/usr/bin/env bash
set -euo pipefail
if [[ "$#" -lt "2" ]]; then
>&2 echo "Usage: $0 <image name> <tag1> ..."
>&2 echo "Example: $0 ghcr.io/zhaofengli/attic main abcd123"
exit 1
fi
cleanup() {
if [[ -f "${manifest_spec}" ]]; then
rm "${manifest_spec}"
fi
}
trap cleanup EXIT
image_name="$1"
tags=("${@:2}")
manifest_spec="$(mktemp -t attic-manifest-spec.XXXXXXXXXX)"
declare -a digests
emit_header() {
echo "image: ${image_name}"
echo "tags:"
for tag in "${tags[@]}"; do
echo "- ${tag}"
done
echo "manifests:"
}
push_digest() {
source_image="docker-archive:$1"
digest="$(skopeo inspect "${source_image}" | jq -r .Digest)"
target_image="docker://${image_name}@${digest}"
>&2 echo "${source_image}${target_image}"
>&2 skopeo copy --insecure-policy "${source_image}" "${target_image}"
echo -n "- "
skopeo inspect "${source_image}" | \
jq '{platform: {architecture: .Architecture, os: .Os}, image: ($image_name + "@" + .Digest)}' \
--arg image_name "${image_name}"
}
>>"${manifest_spec}" emit_header
nix build .#attic-server-image .#attic-server-image-aarch64 -L --print-out-paths | \
while read -r output; do
>>"${manifest_spec}" push_digest "${output}"
done
>&2 echo "----------"
>&2 echo "Generated manifest-tool spec:"
>&2 echo "----------"
cat "${manifest_spec}"
>&2 echo "----------"
manifest-tool push from-spec "${manifest_spec}"
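
For reference, the spec this script generates for `build-and-push-images.sh ghcr.io/zhaofengli/attic main abcd123` would look roughly like the following (digests are placeholders; each `manifests` entry is the JSON object emitted by `jq`, shown compacted here, which YAML accepts as a flow mapping):

image: ghcr.io/zhaofengli/attic
tags:
- main
- abcd123
manifests:
- {"platform": {"architecture": "amd64", "os": "linux"}, "image": "ghcr.io/zhaofengli/attic@sha256:aaaa..."}
- {"platform": {"architecture": "arm64", "os": "linux"}, "image": "ghcr.io/zhaofengli/attic@sha256:bbbb..."}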

.ci/cache-shell.sh Executable file

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
source "$(dirname "${BASH_SOURCE[0]}")/common.sh"
>&2 echo "Caching dev shell"
nix print-dev-env "${base}#" >"${cached_shell}"

.ci/common.sh Normal file

@@ -0,0 +1,7 @@
# Use as:
#
# source "$(dirname "${BASH_SOURCE[0]}")/common.sh"
set -euo pipefail
base="$(readlink -f "$(dirname "${BASH_SOURCE[0]}")/..")"
cached_shell="${base}/.ci/cached-shell"

.ci/run Executable file

@@ -0,0 +1,10 @@
#!/usr/bin/env bash
source "$(dirname "${BASH_SOURCE[0]}")/common.sh"
if [[ ! -f "${cached_shell}" ]]; then
>&2 echo "No cached shell in ${cached_shell}"
exit 1
fi
. "${cached_shell}"
exec "$@"


@@ -16,9 +16,9 @@ jobs:
if: github.repository == 'zhaofengli/attic'
steps:
- uses: actions/checkout@v4.1.1
- uses: actions/checkout@v4.1.7
- uses: DeterminateSystems/nix-installer-action@v9
- uses: DeterminateSystems/nix-installer-action@v14
continue-on-error: true # Self-hosted runners already have Nix installed
- name: Install Attic
@@ -40,12 +40,12 @@ jobs:
cp --recursive --dereference --no-preserve=mode,ownership result public
- name: Upload book artifact
uses: actions/upload-pages-artifact@v2.0.0
uses: actions/upload-pages-artifact@v3.0.1
with:
path: public
- name: Deploy book
uses: actions/deploy-pages@v3.0.1
uses: actions/deploy-pages@v4.0.5
# TODO: Just take a diff of the list of store paths, also abstract all of this out
- name: Push build artifacts


@@ -4,7 +4,7 @@ on:
push:
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
IMAGE_NAME: ghcr.io/${{ github.repository }}
jobs:
tests:
strategy:
@@ -12,14 +12,20 @@ jobs:
os:
- ubuntu-latest
- macos-latest
nix:
- "2.20"
- "2.24"
- "default"
runs-on: ${{ matrix.os }}
permissions:
contents: read
packages: write
steps:
- uses: actions/checkout@v4.1.1
- uses: actions/checkout@v4.1.7
- uses: DeterminateSystems/nix-installer-action@v9
- name: Install current Bash on macOS
if: runner.os == 'macOS'
run: |
command -v brew && brew install bash || true
- uses: DeterminateSystems/nix-installer-action@v14
continue-on-error: true # Self-hosted runners already have Nix installed
- name: Install Attic
@@ -29,6 +35,7 @@ jobs:
fi
- name: Configure Attic
continue-on-error: true
run: |
: "${ATTIC_SERVER:=https://staging.attic.rs/}"
: "${ATTIC_CACHE:=attic-ci}"
@@ -41,45 +48,113 @@
ATTIC_CACHE: ${{ secrets.ATTIC_CACHE }}
ATTIC_TOKEN: ${{ secrets.ATTIC_TOKEN }}
- name: Build and run tests
- name: Cache dev shell
run: |
.ci/cache-shell.sh
system=$(nix-instantiate --eval -E 'builtins.currentSystem')
echo system=$system >>$GITHUB_ENV
tests=$(nix build .#internal."$system".attic-tests --no-link --print-out-paths -L)
find "$tests/bin" -exec {} \;
- name: Run unit tests
run: |
.ci/run just ci-unit-tests ${{ matrix.nix }}
- name: Build WebAssembly crates
if: runner.os == 'Linux'
run: |
.ci/run just ci-build-wasm
# TODO: Just take a diff of the list of store paths, also abstract all of this out
- name: Push build artifacts
run: |
export PATH=$HOME/.nix-profile/bin:$PATH # FIXME
if [ -n "$ATTIC_TOKEN" ]; then
nix build .#internal."$system".attic-tests .#internal."$system".cargoArtifacts --no-link --print-out-paths -L | \
xargs attic push "ci:$ATTIC_CACHE"
nix build --no-link --print-out-paths -L \
.#internalMatrix."$system".\"${{ matrix.nix }}\".attic-tests \
.#internalMatrix."$system".\"${{ matrix.nix }}\".cargoArtifacts \
| xargs attic push "ci:$ATTIC_CACHE"
fi
image:
runs-on: ubuntu-latest
if: github.event_name == 'push'
needs:
- tests
permissions:
contents: read
packages: write
steps:
- uses: actions/checkout@v4.1.7
- name: Install current Bash on macOS
if: runner.os == 'macOS'
run: |
command -v brew && brew install bash || true
- uses: DeterminateSystems/nix-installer-action@v14
continue-on-error: true # Self-hosted runners already have Nix installed
- name: Install Attic
run: |
if ! command -v attic &> /dev/null; then
./.github/install-attic-ci.sh
fi
- name: Configure Attic
continue-on-error: true
run: |
: "${ATTIC_SERVER:=https://staging.attic.rs/}"
: "${ATTIC_CACHE:=attic-ci}"
echo ATTIC_CACHE=$ATTIC_CACHE >>$GITHUB_ENV
export PATH=$HOME/.nix-profile/bin:$PATH # FIXME
attic login --set-default ci "$ATTIC_SERVER" "$ATTIC_TOKEN"
attic use "$ATTIC_CACHE"
env:
ATTIC_SERVER: ${{ secrets.ATTIC_SERVER }}
ATTIC_CACHE: ${{ secrets.ATTIC_CACHE }}
ATTIC_TOKEN: ${{ secrets.ATTIC_TOKEN }}
- name: Cache dev shell
run: |
.ci/cache-shell.sh
system=$(nix-instantiate --eval -E 'builtins.currentSystem')
echo system=$system >>$GITHUB_ENV
- name: Log in to the Container registry
uses: docker/login-action@v3.0.0
if: runner.os == 'Linux' && github.event_name == 'push' && github.ref == format('refs/heads/{0}', github.event.repository.default_branch)
uses: docker/login-action@v3.3.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Push build container image
if: runner.os == 'Linux' && github.event_name == 'push' && github.ref == format('refs/heads/{0}', github.event.repository.default_branch)
- name: Build and push container images
continue-on-error: true
run: |
IMAGE_ID=ghcr.io/${IMAGE_NAME}
TARBALL=$(nix build --json .#attic-server-image | jq -r '.[].outputs.out')
BRANCH=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
TAG="${{ github.sha }}"
[[ "${{ github.ref }}" == "refs/tags/"* ]] && TAG=$(echo $BRANCH | sed -e 's/^v//')
docker load < ${TARBALL}
echo IMAGE_ID=$IMAGE_ID
echo TAG=$TAG
docker tag attic-server:main "${IMAGE_ID}:${TAG}"
docker push ${IMAGE_ID}:${TAG}
if [ "$BRANCH" == "main" ]; then
TAG="latest"
docker tag attic-server:main "${IMAGE_ID}:${TAG}"
docker push ${IMAGE_ID}:${TAG}
declare -a tags
tags+=("${{ github.sha }}")
branch=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
if [[ "${{ github.ref }}" == "refs/tags/"* ]]; then
tags+=("$(echo $branch | sed -e 's/^v//')")
else
tags+=("${branch}")
fi
if [ "$branch" == "${{ github.event.repository.default_branch }}" ]; then
tags+=("latest")
fi
>&2 echo "Image: ${IMAGE_NAME}"
>&2 echo "Tags: ${tags[@]}"
.ci/run just ci-build-and-push-images "${IMAGE_NAME}" "${tags[@]}"
# TODO: Just take a diff of the list of store paths, also abstract all of this out
- name: Push build artifacts
run: |
export PATH=$HOME/.nix-profile/bin:$PATH # FIXME
if [ -n "$ATTIC_TOKEN" ]; then
nix build --no-link --print-out-paths -L \
.#attic-server-image \
.#attic-server-image-aarch64 \
| xargs attic push "ci:$ATTIC_CACHE"
fi
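
The escaped quotes around the matrix value above are needed because `${{ matrix.nix }}` is substituted bare, while `$system` already carries quotes from `nix-instantiate --eval`; the resulting installable resolves to something like (versions picked for illustration):

nix build --no-link --print-out-paths -L '.#internalMatrix."x86_64-linux"."2.24".attic-tests'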

.github/workflows/lint.yml vendored Normal file

@@ -0,0 +1,49 @@
name: Lint
on:
pull_request:
push:
jobs:
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4.1.7
- name: Install current Bash on macOS
if: runner.os == 'macOS'
run: |
command -v brew && brew install bash || true
- uses: DeterminateSystems/nix-installer-action@v14
continue-on-error: true # Self-hosted runners already have Nix installed
- name: Install Attic
run: |
if ! command -v attic &> /dev/null; then
./.github/install-attic-ci.sh
fi
- name: Configure Attic
run: |
: "${ATTIC_SERVER:=https://staging.attic.rs/}"
: "${ATTIC_CACHE:=attic-ci}"
echo ATTIC_CACHE=$ATTIC_CACHE >>$GITHUB_ENV
export PATH=$HOME/.nix-profile/bin:$PATH # FIXME
attic login --set-default ci "$ATTIC_SERVER" "$ATTIC_TOKEN"
attic use "$ATTIC_CACHE"
env:
ATTIC_SERVER: ${{ secrets.ATTIC_SERVER }}
ATTIC_CACHE: ${{ secrets.ATTIC_CACHE }}
ATTIC_TOKEN: ${{ secrets.ATTIC_TOKEN }}
- name: Cache dev shell
run: |
.ci/cache-shell.sh
system=$(nix-instantiate --eval -E 'builtins.currentSystem')
echo system=$system >>$GITHUB_ENV
- name: Check rustfmt
run: .ci/run just ci-rustfmt

Cargo.lock generated

@@ -92,6 +92,12 @@ dependencies = [
"libc",
]
[[package]]
name = "anes"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
[[package]]
name = "anstream"
version = "0.6.14"
@@ -230,11 +236,14 @@ dependencies = [
"async-stream",
"base64 0.22.1",
"bytes",
"cc",
"criterion",
"cxx",
"cxx-build",
"digest",
"displaydoc",
"ed25519-compact",
"fastcdc",
"futures",
"hex",
"lazy_static",
@ -249,7 +258,7 @@ dependencies = [
"sha2",
"tempfile",
"tokio",
"tokio-test",
"version-compare",
"wildmatch",
"xdg",
]
@@ -307,7 +316,6 @@ dependencies = [
"digest",
"displaydoc",
"enum-as-inner",
"fastcdc",
"futures",
"hex",
"http-body-util",
@@ -326,7 +334,6 @@ dependencies = [
"serde_with",
"sha2",
"tokio",
"tokio-test",
"tokio-util",
"toml",
"tower-http",
@@ -1051,14 +1058,20 @@ dependencies = [
]
[[package]]
name = "cc"
version = "1.0.98"
name = "cast"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f"
checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "cc"
version = "1.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72db2f7947ecee9b03b510377e8bb9077afa27176fdbff55c51027e976fdcc48"
dependencies = [
"jobserver",
"libc",
"once_cell",
"shlex",
]
[[package]]
@@ -1088,6 +1101,33 @@ dependencies = [
"windows-targets 0.52.5",
]
[[package]]
name = "ciborium"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e"
dependencies = [
"ciborium-io",
"ciborium-ll",
"serde",
]
[[package]]
name = "ciborium-io"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757"
[[package]]
name = "ciborium-ll"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9"
dependencies = [
"ciborium-io",
"half",
]
[[package]]
name = "clap"
version = "4.5.4"
@@ -1307,6 +1347,44 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "criterion"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f"
dependencies = [
"anes",
"cast",
"ciborium",
"clap",
"criterion-plot",
"futures",
"is-terminal",
"itertools 0.10.5",
"num-traits",
"once_cell",
"oorandom",
"plotters",
"rayon",
"regex",
"serde",
"serde_derive",
"serde_json",
"tinytemplate",
"tokio",
"walkdir",
]
[[package]]
name = "criterion-plot"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
dependencies = [
"cast",
"itertools 0.10.5",
]
[[package]]
name = "crossbeam-channel"
version = "0.5.13"
@@ -1316,6 +1394,25 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d"
dependencies = [
"crossbeam-epoch",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-epoch"
version = "0.9.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "crossbeam-queue"
version = "0.3.11"
@ -1331,6 +1428,12 @@ version = "0.8.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80"
[[package]]
name = "crunchy"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
[[package]]
name = "crypto-bigint"
version = "0.4.9"
@ -1698,6 +1801,11 @@ name = "fastcdc"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a71061d097bfa9a5a4d2efdec57990d9a88745020b365191d37e48541a1628f2"
dependencies = [
"async-stream",
"tokio",
"tokio-stream",
]
[[package]]
name = "fastrand"
@@ -1956,6 +2064,16 @@ dependencies = [
"tracing",
]
[[package]]
name = "half"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888"
dependencies = [
"cfg-if",
"crunchy",
]
[[package]]
name = "hashbrown"
version = "0.12.3"
@ -2388,12 +2506,32 @@ version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3"
[[package]]
name = "is-terminal"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b"
dependencies = [
"hermit-abi",
"libc",
"windows-sys 0.52.0",
]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800"
[[package]]
name = "itertools"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
dependencies = [
"either",
]
[[package]]
name = "itertools"
version = "0.12.1"
@@ -2642,8 +2780,9 @@ dependencies = [
[[package]]
name = "nix-base32"
version = "0.1.2-alpha.0"
source = "git+https://github.com/zhaofengli/nix-base32.git?rev=b850c6e9273d1c39bd93abb704a53345f5be92eb#b850c6e9273d1c39bd93abb704a53345f5be92eb"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2628953ed836273ee4262e3708a8ef63ca38bd8a922070626eef7f9e5d8d536"
[[package]]
name = "nom"
@ -2776,6 +2915,12 @@ version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "oorandom"
version = "11.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9"
[[package]]
name = "openssl-probe"
version = "0.1.5"
@ -3002,6 +3147,34 @@ version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec"
[[package]]
name = "plotters"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a15b6eccb8484002195a3e44fe65a4ce8e93a625797a063735536fd59cb01cf3"
dependencies = [
"num-traits",
"plotters-backend",
"plotters-svg",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "plotters-backend"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "414cec62c6634ae900ea1c56128dfe87cf63e7caece0852ec76aba307cebadb7"
[[package]]
name = "plotters-svg"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81b30686a7d9c3e010b84284bdd26a29f2138574f52f5eb6f794fc0ad924e705"
dependencies = [
"plotters-backend",
]
[[package]]
name = "portable-atomic"
version = "1.6.0"
@@ -3088,7 +3261,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1"
dependencies = [
"anyhow",
"itertools",
"itertools 0.12.1",
"proc-macro2",
"quote",
"syn 2.0.66",
@@ -3168,6 +3341,26 @@ dependencies = [
"getrandom",
]
[[package]]
name = "rayon"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
dependencies = [
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
dependencies = [
"crossbeam-deque",
"crossbeam-utils",
]
[[package]]
name = "redox_syscall"
version = "0.4.1"
@ -3962,6 +4155,12 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "signal-hook-registry"
version = "1.4.2"
@ -4063,7 +4262,7 @@ version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c"
dependencies = [
"itertools",
"itertools 0.12.1",
"nom",
"unicode_categories",
]
@@ -4453,6 +4652,16 @@ dependencies = [
"time-core",
]
[[package]]
name = "tinytemplate"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc"
dependencies = [
"serde",
"serde_json",
]
[[package]]
name = "tinyvec"
version = "1.6.0"
@@ -4540,19 +4749,6 @@ dependencies = [
"tokio",
]
[[package]]
name = "tokio-test"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
dependencies = [
"async-stream",
"bytes",
"futures-core",
"tokio",
"tokio-stream",
]
[[package]]
name = "tokio-util"
version = "0.7.11"
@ -4893,6 +5089,12 @@ version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "version-compare"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "852e951cb7832cb45cb1169900d19760cfa39b82bc0ea9c0e5a14ae88411c98b"
[[package]]
name = "version_check"
version = "0.9.4"


@ -11,11 +11,12 @@ bytes = "1.4.0"
displaydoc = "0.2.4"
digest = "0.10.7"
ed25519-compact = "2.0.4"
fastcdc = "3.0.3"
futures = "0.3.28"
hex = "0.4.3"
lazy_static = "1.4.0"
log = "0.4.18"
nix-base32 = { git = "https://github.com/zhaofengli/nix-base32.git", rev = "b850c6e9273d1c39bd93abb704a53345f5be92eb" }
nix-base32 = "0.2.0"
regex = "1.8.3"
serde = { version = "1.0.163", features = ["derive"] }
serde_yaml = "0.9.21"
@@ -32,30 +33,51 @@ cxx = { version = "1.0", optional = true }
version = "1.28.2"
optional = true
features = [
"fs",
"io-util",
"process",
"macros",
"sync",
]
[dev-dependencies]
criterion = { version = "0.5", features = ["html_reports", "async_tokio"] }
fastcdc = { version = "*", features = ["tokio"] }
serde_json = "1.0.96"
tokio-test = "0.4.2"
[build-dependencies]
cc = "1.1.13"
cxx-build = { version = "1.0", optional = true }
pkg-config = "0.3.27"
tempfile = "3"
version-compare = "0.2.0"
[features]
default = [ "nix_store", "tokio" ]
default = [
"chunking",
"nix_store",
"stream",
"tokio",
]
# Chunking.
chunking = ["tokio", "stream", "dep:async-stream"]
# Native libnixstore bindings.
#
# When disabled, the native Rust portions of nix_store can still be used.
nix_store = [ "dep:cxx", "dep:cxx-build", "tokio/rt" ]
nix_store = [
"tokio",
"tokio/fs",
"tokio/process",
"dep:cxx",
"dep:cxx-build",
]
# Tokio.
#
# When disabled, any part depending on tokio is unavailable.
tokio = [ "dep:tokio", "dep:async-stream" ]
# Stream utilities.
stream = ["tokio", "dep:async-stream"]
# Tokio runtime.
tokio = ["dep:tokio", "tokio/rt", "tokio/time"]
[[bench]]
name = "chunking"
harness = false

attic/benches/chunking.rs Normal file

@@ -0,0 +1,84 @@
use std::io::Cursor;
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use futures::StreamExt;
use attic::chunking::chunk_stream;
use attic::testing::{get_fake_data, get_runtime};
struct Parameters {
min_size: u32,
avg_size: u32,
max_size: u32,
}
pub fn bench_chunking(c: &mut Criterion) {
let rt = get_runtime();
let data = get_fake_data(128 * 1024 * 1024); // 128 MiB
let cases = [
(
"2K,4K,8K",
Parameters {
min_size: 2 * 1024,
avg_size: 4 * 1024,
max_size: 8 * 1024,
},
),
(
"8K,16K,32K",
Parameters {
min_size: 8 * 1024,
avg_size: 16 * 1024,
max_size: 32 * 1024,
},
),
(
"1M,4M,16M",
Parameters {
min_size: 1024 * 1024,
avg_size: 4 * 1024 * 1024,
max_size: 16 * 1024 * 1024,
},
),
];
let mut group = c.benchmark_group("chunking");
group.throughput(Throughput::Bytes(data.len() as u64));
for (case, params) in cases {
group.bench_with_input(BenchmarkId::new("ronomon", case), &params, |b, params| {
b.to_async(&rt).iter(|| async {
let cursor = Cursor::new(&data);
let mut chunks = chunk_stream(
cursor,
params.min_size as usize,
params.avg_size as usize,
params.max_size as usize,
);
while let Some(chunk) = chunks.next().await {
black_box(chunk).unwrap();
}
})
});
group.bench_with_input(BenchmarkId::new("v2020", case), &params, |b, params| {
b.to_async(&rt).iter(|| async {
let cursor = Cursor::new(&data);
let mut chunks = fastcdc::v2020::AsyncStreamCDC::new(
cursor,
params.min_size,
params.avg_size,
params.max_size,
);
let mut chunks = Box::pin(chunks.as_stream());
while let Some(chunk) = chunks.next().await {
black_box(chunk).unwrap();
}
})
});
}
group.finish();
}
criterion_group!(benches, bench_chunking);
criterion_main!(benches);
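
Since the bench is registered with `harness = false` in the Cargo.toml hunk above, it runs under Criterion's own entry point; a plausible invocation is:

cargo bench -p attic --bench chunking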


@@ -4,41 +4,83 @@
fn main() {
#[cfg(feature = "nix_store")]
build_bridge();
nix_store::build_bridge();
}
#[cfg(feature = "nix_store")]
fn build_bridge() {
// Temporary workaround for issue in <https://github.com/NixOS/nix/pull/8484>
let hacky_include = {
let dir = tempfile::tempdir().expect("Failed to create temporary directory for workaround");
std::fs::write(dir.path().join("uds-remote-store.md"), "\"\"").unwrap();
dir
};
mod nix_store {
use cc::Build;
use version_compare::Version;
cxx_build::bridge("src/nix_store/bindings/mod.rs")
.file("src/nix_store/bindings/nix.cpp")
.flag("-std=c++2a")
.flag("-O2")
.flag("-include")
.flag("nix/config.h")
.flag("-idirafter")
.flag(hacky_include.path().to_str().unwrap())
// In Nix 2.19+, nix/args/root.hh depends on being able to #include "args.hh" (which is in its parent directory), for some reason
.flag("-I")
.flag(concat!(env!("NIX_INCLUDE_PATH"), "/nix"))
.compile("nixbinding");
struct NixDependency {
version: String,
}
println!("cargo:rerun-if-changed=src/nix_store/bindings");
impl NixDependency {
fn detect() -> Self {
let library = pkg_config::Config::new()
.cargo_metadata(false)
.atleast_version("2.4")
.probe("nix-main")
.expect("Failed to find nix-main >=2.4 through pkg-config");
// the -l flags must be after -lnixbinding
pkg_config::Config::new()
.atleast_version("2.4")
.probe("nix-store")
.unwrap();
Self {
version: library.version,
}
}
pkg_config::Config::new()
.atleast_version("2.4")
.probe("nix-main")
.unwrap();
fn apply_version_flags(&self, build: &mut Build) {
let version = Version::from(&self.version).unwrap();
if version >= Version::from("2.20").unwrap() {
build.flag("-DATTIC_NIX_2_20");
}
}
fn emit_cargo_metadata(&self) {
pkg_config::Config::new()
.atleast_version("2.4")
.probe("nix-store")
.unwrap();
pkg_config::Config::new()
.atleast_version("2.4")
.probe("nix-main")
.unwrap();
}
}
pub fn build_bridge() {
let nix_dep = NixDependency::detect();
// Temporary workaround for issue in <https://github.com/NixOS/nix/pull/8484>
let hacky_include = {
let dir =
tempfile::tempdir().expect("Failed to create temporary directory for workaround");
std::fs::write(dir.path().join("uds-remote-store.md"), "\"\"").unwrap();
dir
};
let mut build = cxx_build::bridge("src/nix_store/bindings/mod.rs");
build
.file("src/nix_store/bindings/nix.cpp")
.flag("-std=c++2a")
.flag("-O2")
.flag("-include")
.flag("nix/config.h")
.flag("-idirafter")
.flag(hacky_include.path().to_str().unwrap())
// In Nix 2.19+, nix/args/root.hh depends on being able to #include "args.hh" (which is in its parent directory), for some reason
.flag("-I")
.flag(concat!(env!("NIX_INCLUDE_PATH"), "/nix"));
nix_dep.apply_version_flags(&mut build);
build.compile("nixbinding");
println!("cargo:rerun-if-changed=src/nix_store/bindings");
// the -l flags must be after -lnixbinding
nix_dep.emit_cargo_metadata();
}
}
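
The detection performed by `NixDependency` amounts to two pkg-config probes plus a version comparison; a rough command-line equivalent (for illustration only) is:

pkg-config --atleast-version=2.4 nix-main   # hard requirement; the build fails otherwise
pkg-config --modversion nix-main            # e.g. 2.24.x; >= 2.20 turns on -DATTIC_NIX_2_20
pkg-config --libs nix-store nix-main        # re-emitted as Cargo link metadata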


@@ -9,7 +9,7 @@ use fastcdc::ronomon::FastCDC;
use futures::stream::Stream;
use tokio::io::AsyncRead;
use attic::stream::read_chunk_async;
use crate::stream::read_chunk_async;
/// Splits a stream into content-defined chunks.
///
@@ -72,45 +72,30 @@ mod tests {
use std::io::Cursor;
use futures::StreamExt;
use tokio_test::block_on;
use crate::testing::get_fake_data;
/// Chunks and reconstructs a file.
#[test]
fn test_chunking_basic() {
fn case(size: usize) {
block_on(async move {
let test_file = get_data(size); // 32 MiB
let mut reconstructed_file = Vec::new();
#[tokio::test]
async fn test_chunking_basic() {
async fn case(size: usize) {
let test_file = get_fake_data(size); // 32 MiB
let mut reconstructed_file = Vec::new();
let cursor = Cursor::new(&test_file);
let mut chunks = chunk_stream(cursor, 8 * 1024, 16 * 1024, 32 * 1024);
let cursor = Cursor::new(&test_file);
let mut chunks = chunk_stream(cursor, 8 * 1024, 16 * 1024, 32 * 1024);
while let Some(chunk) = chunks.next().await {
let chunk = chunk.unwrap();
eprintln!("Got a {}-byte chunk", chunk.len());
reconstructed_file.extend(chunk);
}
while let Some(chunk) = chunks.next().await {
let chunk = chunk.unwrap();
eprintln!("Got a {}-byte chunk", chunk.len());
reconstructed_file.extend(chunk);
}
assert_eq!(reconstructed_file, test_file);
});
assert_eq!(reconstructed_file, test_file);
}
case(32 * 1024 * 1024 - 1);
case(32 * 1024 * 1024);
case(32 * 1024 * 1024 + 1);
}
/// Returns some fake data.
fn get_data(len: usize) -> Vec<u8> {
let mut state = 42u32;
let mut data = vec![0u8; len];
for i in 0..data.len() {
(state, _) = state.overflowing_mul(1664525u32);
(state, _) = state.overflowing_add(1013904223u32);
data[i] = ((state >> (i % 24)) & 0xff) as u8;
}
data
case(32 * 1024 * 1024 - 1).await;
case(32 * 1024 * 1024).await;
case(32 * 1024 * 1024 + 1).await;
}
}


@@ -118,7 +118,7 @@ impl Serialize for Hash {
}
/// Decodes a base16 or base32 encoded hash containing a specified number of bytes.
fn decode_hash<'s>(s: &'s str, typ: &'static str, expected_bytes: usize) -> AtticResult<Vec<u8>> {
fn decode_hash(s: &str, typ: &'static str, expected_bytes: usize) -> AtticResult<Vec<u8>> {
let base16_len = expected_bytes * 2;
let base32_len = (expected_bytes * 8 - 1) / 5 + 1;
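
Checking the two length formulas for a 32-byte SHA-256 digest (shell arithmetic, illustration only):

echo $(( 32 * 2 ))                # base16_len = 64
echo $(( (32 * 8 - 1) / 5 + 1 ))  # base32_len = 52, the familiar length of Nix base32 hashes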


@@ -17,12 +17,14 @@
pub mod api;
pub mod cache;
#[cfg(feature = "chunking")]
pub mod chunking;
pub mod error;
pub mod hash;
pub mod mime;
pub mod nix_store;
pub mod signing;
#[cfg(feature = "tokio")]
#[cfg(feature = "stream")]
pub mod stream;
#[cfg(target_family = "unix")]
pub mod testing;


@@ -18,7 +18,7 @@ unsafe impl Send for FfiNixStore {}
unsafe impl Sync for FfiNixStore {}
impl FfiNixStore {
pub fn store<'a>(&'a self) -> Pin<&'a mut ffi::CNixStore> {
pub fn store(&self) -> Pin<&mut ffi::CNixStore> {
unsafe {
let ptr = self.0.get().as_mut().unwrap();
ptr.pin_mut()


@@ -18,6 +18,14 @@ static nix::StorePath store_path_from_rust(RBasePathSlice base_name) {
return nix::StorePath(sv);
}
static bool hash_is_sha256(const nix::Hash &hash) {
#ifdef ATTIC_NIX_2_20
return hash.algo == nix::HashAlgorithm::SHA256;
#else
return hash.type == nix::htSHA256;
#endif
}
// ========
// RustSink
// ========
@@ -44,7 +52,7 @@ CPathInfo::CPathInfo(nix::ref<const nix::ValidPathInfo> pi) : pi(pi) {}
RHashSlice CPathInfo::nar_sha256_hash() {
auto &hash = this->pi->narHash;
if (hash.type != nix::htSHA256) {
if (!hash_is_sha256(hash)) {
throw nix::Error("Only SHA-256 hashes are supported at the moment");
}


@@ -6,7 +6,6 @@ use std::os::unix::ffi::OsStrExt;
use std::process::Command;
use serde::de::DeserializeOwned;
use tokio_test::block_on;
pub mod test_nar;
@@ -143,113 +142,105 @@ fn test_store_path_hash() {
StorePathHash::new(h).unwrap_err();
}
#[test]
fn test_nar_streaming() {
#[tokio::test]
async fn test_nar_streaming() {
let store = NixStore::connect().expect("Failed to connect to the Nix store");
block_on(async move {
let test_nar = test_nar::NO_DEPS;
test_nar.import().await.expect("Could not import test NAR");
let test_nar = test_nar::NO_DEPS;
test_nar.import().await.expect("Could not import test NAR");
let target = test_nar.get_target().expect("Could not create dump target");
let writer = target.get_writer().await.expect("Could not get writer");
let target = test_nar.get_target().expect("Could not create dump target");
let writer = target.get_writer().await.expect("Could not get writer");
let store_path = store.parse_store_path(test_nar.path()).unwrap();
let store_path = store.parse_store_path(test_nar.path()).unwrap();
let stream = store.nar_from_path(store_path);
stream.write_all(writer).await.unwrap();
let stream = store.nar_from_path(store_path);
stream.write_all(writer).await.unwrap();
target
.validate()
.await
.expect("Could not validate resulting dump");
});
target
.validate()
.await
.expect("Could not validate resulting dump");
}
#[test]
fn test_compute_fs_closure() {
#[tokio::test]
async fn test_compute_fs_closure() {
use test_nar::{WITH_DEPS_A, WITH_DEPS_B, WITH_DEPS_C};
let store = NixStore::connect().expect("Failed to connect to the Nix store");
block_on(async move {
use test_nar::{WITH_DEPS_A, WITH_DEPS_B, WITH_DEPS_C};
for nar in [WITH_DEPS_C, WITH_DEPS_B, WITH_DEPS_A] {
nar.import().await.expect("Could not import test NAR");
for nar in [WITH_DEPS_C, WITH_DEPS_B, WITH_DEPS_A] {
nar.import().await.expect("Could not import test NAR");
let path = store
.parse_store_path(nar.path())
.expect("Could not parse store path");
let actual: HashSet<StorePath> = store
.compute_fs_closure(path, false, false, false)
.await
.expect("Could not compute closure")
.into_iter()
.collect();
assert_eq!(nar.closure(), actual);
}
});
}
#[test]
fn test_compute_fs_closure_multi() {
let store = NixStore::connect().expect("Failed to connect to the Nix store");
block_on(async move {
use test_nar::{NO_DEPS, WITH_DEPS_A, WITH_DEPS_B, WITH_DEPS_C};
for nar in [NO_DEPS, WITH_DEPS_C, WITH_DEPS_B, WITH_DEPS_A] {
nar.import().await.expect("Could not import test NAR");
}
let mut expected = NO_DEPS.closure();
expected.extend(WITH_DEPS_A.closure());
let paths = vec![
store.parse_store_path(WITH_DEPS_A.path()).unwrap(),
store.parse_store_path(NO_DEPS.path()).unwrap(),
];
let path = store
.parse_store_path(nar.path())
.expect("Could not parse store path");
let actual: HashSet<StorePath> = store
.compute_fs_closure_multi(paths, false, false, false)
.compute_fs_closure(path, false, false, false)
.await
.expect("Could not compute closure")
.into_iter()
.collect();
eprintln!("Closure: {:#?}", actual);
assert_eq!(expected, actual);
});
assert_eq!(nar.closure(), actual);
}
}
#[test]
fn test_query_path_info() {
#[tokio::test]
async fn test_compute_fs_closure_multi() {
use test_nar::{NO_DEPS, WITH_DEPS_A, WITH_DEPS_B, WITH_DEPS_C};
let store = NixStore::connect().expect("Failed to connect to the Nix store");
block_on(async move {
use test_nar::{WITH_DEPS_B, WITH_DEPS_C};
for nar in [NO_DEPS, WITH_DEPS_C, WITH_DEPS_B, WITH_DEPS_A] {
nar.import().await.expect("Could not import test NAR");
}
for nar in [WITH_DEPS_C, WITH_DEPS_B] {
nar.import().await.expect("Could not import test NAR");
}
let mut expected = NO_DEPS.closure();
expected.extend(WITH_DEPS_A.closure());
let nar = WITH_DEPS_B;
let path = store.parse_store_path(nar.path()).unwrap();
let path_info = store
.query_path_info(path)
.await
.expect("Could not query path info");
let paths = vec![
store.parse_store_path(WITH_DEPS_A.path()).unwrap(),
store.parse_store_path(NO_DEPS.path()).unwrap(),
];
eprintln!("Path info: {:?}", path_info);
let actual: HashSet<StorePath> = store
.compute_fs_closure_multi(paths, false, false, false)
.await
.expect("Could not compute closure")
.into_iter()
.collect();
assert_eq!(nar.nar().len() as u64, path_info.nar_size);
assert_eq!(
vec![PathBuf::from(
"3k1wymic8p7h5pfcqfhh0jan8ny2a712-attic-test-with-deps-c-final"
),],
path_info.references
);
});
eprintln!("Closure: {:#?}", actual);
assert_eq!(expected, actual);
}
#[tokio::test]
async fn test_query_path_info() {
use test_nar::{WITH_DEPS_B, WITH_DEPS_C};
let store = NixStore::connect().expect("Failed to connect to the Nix store");
for nar in [WITH_DEPS_C, WITH_DEPS_B] {
nar.import().await.expect("Could not import test NAR");
}
let nar = WITH_DEPS_B;
let path = store.parse_store_path(nar.path()).unwrap();
let path_info = store
.query_path_info(path)
.await
.expect("Could not query path info");
eprintln!("Path info: {:?}", path_info);
assert_eq!(nar.nar().len() as u64, path_info.nar_size);
assert_eq!(
vec![PathBuf::from(
"3k1wymic8p7h5pfcqfhh0jan8ny2a712-attic-test-with-deps-c-final"
),],
path_info.references
);
}


@@ -176,10 +176,9 @@ mod tests {
use bytes::{BufMut, BytesMut};
use futures::future;
use tokio::io::AsyncReadExt;
use tokio_test::block_on;
#[test]
fn test_stream_hasher() {
#[tokio::test]
async fn test_stream_hasher() {
let expected = b"hello world";
let expected_sha256 =
hex::decode("b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9")
@@ -191,10 +190,22 @@
// force multiple reads
let mut buf = vec![0u8; 100];
let mut bytes_read = 0;
bytes_read += block_on(read.read(&mut buf[bytes_read..bytes_read + 5])).unwrap();
bytes_read += block_on(read.read(&mut buf[bytes_read..bytes_read + 5])).unwrap();
bytes_read += block_on(read.read(&mut buf[bytes_read..bytes_read + 5])).unwrap();
bytes_read += block_on(read.read(&mut buf[bytes_read..bytes_read + 5])).unwrap();
bytes_read += read
.read(&mut buf[bytes_read..bytes_read + 5])
.await
.unwrap();
bytes_read += read
.read(&mut buf[bytes_read..bytes_read + 5])
.await
.unwrap();
bytes_read += read
.read(&mut buf[bytes_read..bytes_read + 5])
.await
.unwrap();
bytes_read += read
.read(&mut buf[bytes_read..bytes_read + 5])
.await
.unwrap();
assert_eq!(expected.len(), bytes_read);
assert_eq!(expected, &buf[..bytes_read]);
@@ -206,8 +217,8 @@
eprintln!("finalized = {:x?}", finalized);
}
#[test]
fn test_merge_chunks() {
#[tokio::test]
async fn test_merge_chunks() {
let chunk_a: BoxStream<Result<Bytes, ()>> = {
let s = stream! {
yield Ok(Bytes::from_static(b"Hello"));
@@ -236,13 +247,11 @@
let streamer = |c, _| future::ok(c);
let mut merged = merge_chunks(chunks, streamer, (), 2);
let bytes = block_on(async move {
let mut bytes = BytesMut::with_capacity(100);
while let Some(item) = merged.next().await {
bytes.put(item.unwrap());
}
bytes.freeze()
});
let mut bytes = BytesMut::with_capacity(100);
while let Some(item) = merged.next().await {
bytes.put(item.unwrap());
}
let bytes = bytes.freeze();
assert_eq!(&*bytes, b"Hello, world!");
}


@@ -1,3 +1,27 @@
//! Utilities for testing.
pub mod shadow_store;
use tokio::runtime::Runtime;
/// Returns a new Tokio runtime.
pub fn get_runtime() -> Runtime {
tokio::runtime::Builder::new_current_thread()
.enable_time()
.build()
.unwrap()
}
/// Returns some fake data.
pub fn get_fake_data(len: usize) -> Vec<u8> {
let mut state = 42u32;
let mut data = vec![0u8; len];
for (i, byte) in data.iter_mut().enumerate() {
(state, _) = state.overflowing_mul(1664525u32);
(state, _) = state.overflowing_add(1013904223u32);
*byte = ((state >> (i % 24)) & 0xff) as u8;
}
data
}


@@ -14,7 +14,7 @@ use std::str::FromStr;
use anyhow::{anyhow, Result};
use serde::{Deserialize, Serialize};
pub use attic::cache::{CacheName};
pub use attic::cache::CacheName;
/// A reference to a cache.
#[derive(Debug, Clone)]


@@ -4,12 +4,13 @@ use std::sync::Arc;
use anyhow::{anyhow, Result};
use clap::Parser;
use indicatif::MultiProgress;
use tokio::io::{self, AsyncBufReadExt, BufReader};
use crate::api::ApiClient;
use crate::cache::CacheRef;
use crate::cache::{CacheName, CacheRef, ServerName};
use crate::cli::Opts;
use crate::config::Config;
use crate::push::{PushConfig, Pusher};
use crate::push::{PushConfig, PushSessionConfig, Pusher};
use attic::nix_store::NixStore;
/// Push closures to a binary cache.
@@ -24,6 +25,10 @@ pub struct Push {
/// The store paths to push.
paths: Vec<PathBuf>,
/// Read paths from the standard input.
#[clap(long)]
stdin: bool,
/// Push the specified paths only and do not compute closures.
#[clap(long)]
no_closure: bool,
@@ -41,6 +46,79 @@
force_preamble: bool,
}
struct PushContext {
store: Arc<NixStore>,
cache_name: CacheName,
server_name: ServerName,
pusher: Pusher,
no_closure: bool,
ignore_upstream_cache_filter: bool,
}
impl PushContext {
async fn push_static(self, paths: Vec<PathBuf>) -> Result<()> {
let roots = paths
.into_iter()
.map(|p| self.store.follow_store_path(p))
.collect::<std::result::Result<Vec<_>, _>>()?;
let plan = self
.pusher
.plan(roots, self.no_closure, self.ignore_upstream_cache_filter)
.await?;
if plan.store_path_map.is_empty() {
if plan.num_all_paths == 0 {
eprintln!("🤷 Nothing selected.");
} else {
eprintln!(
"✅ All done! ({num_already_cached} already cached, {num_upstream} in upstream)",
num_already_cached = plan.num_already_cached,
num_upstream = plan.num_upstream,
);
}
return Ok(());
} else {
eprintln!("⚙️ Pushing {num_missing_paths} paths to \"{cache}\" on \"{server}\" ({num_already_cached} already cached, {num_upstream} in upstream)...",
cache = self.cache_name.as_str(),
server = self.server_name.as_str(),
num_missing_paths = plan.store_path_map.len(),
num_already_cached = plan.num_already_cached,
num_upstream = plan.num_upstream,
);
}
for (_, path_info) in plan.store_path_map {
self.pusher.queue(path_info).await?;
}
let results = self.pusher.wait().await;
results.into_values().collect::<Result<Vec<()>>>()?;
Ok(())
}
async fn push_stdin(self) -> Result<()> {
let session = self.pusher.into_push_session(PushSessionConfig {
no_closure: self.no_closure,
ignore_upstream_cache_filter: self.ignore_upstream_cache_filter,
});
let stdin = BufReader::new(io::stdin());
let mut lines = stdin.lines();
while let Some(line) = lines.next_line().await? {
let path = self.store.follow_store_path(line)?;
session.queue_many(vec![path])?;
}
let results = session.wait().await?;
results.into_values().collect::<Result<Vec<()>>>()?;
Ok(())
}
}
pub async fn run(opts: Opts) -> Result<()> {
let sub = opts.command.as_push().unwrap();
if sub.jobs == 0 {
@@ -50,19 +128,13 @@ pub async fn run(opts: Opts) -> Result<()> {
let config = Config::load()?;
let store = Arc::new(NixStore::connect()?);
let roots = sub
.paths
.clone()
.into_iter()
.map(|p| store.follow_store_path(p))
.collect::<std::result::Result<Vec<_>, _>>()?;
let (server_name, server, cache) = config.resolve_cache(&sub.cache)?;
let (server_name, server, cache_name) = config.resolve_cache(&sub.cache)?;
let mut api = ApiClient::from_server_config(server.clone())?;
// Confirm remote cache validity, query cache config
let cache_config = api.get_cache_config(cache).await?;
let cache_config = api.get_cache_config(cache_name).await?;
if let Some(api_endpoint) = &cache_config.api_endpoint {
// Use delegated API endpoint
@@ -76,39 +148,29 @@
let mp = MultiProgress::new();
let pusher = Pusher::new(store, api, cache.to_owned(), cache_config, mp, push_config);
let plan = pusher
.plan(roots, sub.no_closure, sub.ignore_upstream_cache_filter)
.await?;
let pusher = Pusher::new(
store.clone(),
api,
cache_name.to_owned(),
cache_config,
mp,
push_config,
);
if plan.store_path_map.is_empty() {
if plan.num_all_paths == 0 {
eprintln!("🤷 Nothing selected.");
} else {
eprintln!(
"✅ All done! ({num_already_cached} already cached, {num_upstream} in upstream)",
num_already_cached = plan.num_already_cached,
num_upstream = plan.num_upstream,
);
}
let push_ctx = PushContext {
store,
cache_name: cache_name.clone(),
server_name: server_name.clone(),
pusher,
no_closure: sub.no_closure,
ignore_upstream_cache_filter: sub.ignore_upstream_cache_filter,
};
return Ok(());
if sub.stdin {
push_ctx.push_stdin().await?;
} else {
eprintln!("⚙️ Pushing {num_missing_paths} paths to \"{cache}\" on \"{server}\" ({num_already_cached} already cached, {num_upstream} in upstream)...",
cache = cache.as_str(),
server = server_name.as_str(),
num_missing_paths = plan.store_path_map.len(),
num_already_cached = plan.num_already_cached,
num_upstream = plan.num_upstream,
);
push_ctx.push_static(sub.paths.clone()).await?;
}
for (_, path_info) in plan.store_path_map {
pusher.queue(path_info).await?;
}
let results = pusher.wait().await;
results.into_values().collect::<Result<Vec<()>>>()?;
Ok(())
}
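
With the `--stdin` flag added above, store paths can be streamed into a push session as they become known; a plausible pipeline (the cache name `demo` is a placeholder) would be:

nix-store --query --requisites /run/current-system | attic push --stdin demo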


@@ -28,7 +28,7 @@ use bytes::Bytes;
use futures::future::join_all;
use futures::stream::{Stream, TryStreamExt};
use indicatif::{HumanBytes, MultiProgress, ProgressBar, ProgressState, ProgressStyle};
use tokio::sync::Mutex;
use tokio::sync::{mpsc, Mutex};
use tokio::task::{spawn, JoinHandle};
use tokio::time;
@@ -100,11 +100,22 @@ pub struct Pusher {
/// seconds since the last path is queued or it's been 10 seconds in total.
pub struct PushSession {
/// Sender to the batching future.
sender: channel::Sender<Vec<StorePath>>,
sender: channel::Sender<SessionQueueCommand>,
/// Receiver of results.
result_receiver: mpsc::Receiver<Result<HashMap<StorePath, Result<()>>>>,
}
enum SessionQueueCommand {
Paths(Vec<StorePath>),
Flush,
Terminate,
}
enum SessionQueuePoll {
Paths(Vec<StorePath>),
Flush,
Terminate,
Closed,
TimedOut,
}
@@ -255,36 +266,36 @@ impl Pusher {
impl PushSession {
pub fn with_pusher(pusher: Pusher, config: PushSessionConfig) -> Self {
let (sender, receiver) = channel::unbounded();
let (result_sender, result_receiver) = mpsc::channel(1);
let known_paths_mutex = Arc::new(Mutex::new(HashSet::new()));
// FIXME
spawn(async move {
let pusher = Arc::new(pusher);
loop {
if let Err(e) = Self::worker(
pusher.clone(),
config,
known_paths_mutex.clone(),
receiver.clone(),
)
.await
{
eprintln!("Worker exited: {:?}", e);
} else {
break;
}
if let Err(e) = Self::worker(
pusher,
config,
known_paths_mutex.clone(),
receiver.clone(),
result_sender.clone(),
)
.await
{
let _ = result_sender.send(Err(e)).await;
}
});
Self { sender }
Self {
sender,
result_receiver,
}
}
async fn worker(
pusher: Arc<Pusher>,
pusher: Pusher,
config: PushSessionConfig,
known_paths_mutex: Arc<Mutex<HashSet<StorePathHash>>>,
receiver: channel::Receiver<Vec<StorePath>>,
receiver: channel::Receiver<SessionQueueCommand>,
result_sender: mpsc::Sender<Result<HashMap<StorePath, Result<()>>>>,
) -> Result<()> {
let mut roots = HashSet::new();
@@ -296,7 +307,9 @@ impl PushSession {
loop {
let poll = tokio::select! {
r = receiver.recv() => match r {
Ok(paths) => SessionQueuePoll::Paths(paths),
Ok(SessionQueueCommand::Paths(paths)) => SessionQueuePoll::Paths(paths),
Ok(SessionQueueCommand::Flush) => SessionQueuePoll::Flush,
Ok(SessionQueueCommand::Terminate) => SessionQueuePoll::Terminate,
_ => SessionQueuePoll::Closed,
},
_ = time::sleep(Duration::from_secs(2)) => SessionQueuePoll::TimedOut,
@@ -306,10 +319,10 @@
SessionQueuePoll::Paths(store_paths) => {
roots.extend(store_paths.into_iter());
}
SessionQueuePoll::Closed => {
SessionQueuePoll::Closed | SessionQueuePoll::Terminate => {
break true;
}
SessionQueuePoll::TimedOut => {
SessionQueuePoll::Flush | SessionQueuePoll::TimedOut => {
break false;
}
}
@@ -352,15 +365,37 @@
drop(known_paths);
if done {
let result = pusher.wait().await;
result_sender.send(Ok(result)).await?;
return Ok(());
}
}
}
/// Waits for all workers to terminate, returning all results.
pub async fn wait(mut self) -> Result<HashMap<StorePath, Result<()>>> {
self.flush()?;
// The worker might have died
let _ = self.sender.send(SessionQueueCommand::Terminate).await;
self.result_receiver
.recv()
.await
.expect("Nothing in result channel")
}
/// Queues multiple store paths to be pushed.
pub fn queue_many(&self, store_paths: Vec<StorePath>) -> Result<()> {
self.sender
.send_blocking(store_paths)
.send_blocking(SessionQueueCommand::Paths(store_paths))
.map_err(|e| anyhow!(e))
}
/// Flushes the worker queue.
pub fn flush(&self) -> Result<()> {
self.sender
.send_blocking(SessionQueueCommand::Flush)
.map_err(|e| anyhow!(e))
}
}


@@ -7,8 +7,9 @@
{ stdenv
, lib
, buildPackages
, craneLib
, rustPlatform
, rust
, runCommand
, writeReferencesToFile
, pkg-config
@@ -19,12 +20,22 @@
, boost
, darwin
, libiconv
, extraPackageArgs ? {}
}:
let
version = "0.1.0";
ignoredPaths = [ ".github" "target" "book" "nixos" "integration-tests" ];
ignoredPaths = [
".ci"
".github"
"book"
"flake"
"integration-tests"
"nixos"
"target"
];
src = lib.cleanSourceWith {
filter = name: type: !(type == "directory" && builtins.elem (baseNameOf name) ignoredPaths);
@@ -43,7 +54,19 @@ let
libiconv
];
cargoArtifacts = craneLib.buildDepsOnly {
crossArgs = let
rustTargetSpec = rust.toRustTargetSpec stdenv.hostPlatform;
rustTargetSpecEnv = lib.toUpper (builtins.replaceStrings [ "-" ] [ "_" ] rustTargetSpec);
in lib.optionalAttrs (stdenv.hostPlatform != stdenv.buildPlatform) {
depsBuildBuild = [ buildPackages.stdenv.cc ];
CARGO_BUILD_TARGET = rustTargetSpec;
"CARGO_TARGET_${rustTargetSpecEnv}_LINKER" = "${stdenv.cc.targetPrefix}cc";
};
extraArgs = crossArgs // extraPackageArgs;
cargoArtifacts = craneLib.buildDepsOnly ({
pname = "attic";
inherit src version nativeBuildInputs buildInputs;
@@ -54,7 +77,7 @@ let
# With `use-zstd`, the cargo artifacts are archived in a `tar.zstd`. This is
# actually set if you use `buildPackage` without passing `cargoArtifacts`.
installCargoArtifactsMode = "use-zstd";
};
} // extraArgs);
mkAttic = args: craneLib.buildPackage ({
pname = "attic";
@@ -86,7 +109,11 @@ let
maintainers = with maintainers; [ zhaofengli ];
platforms = platforms.linux ++ platforms.darwin;
};
} // args);
passthru = {
inherit nix;
};
} // args // extraArgs);
attic = mkAttic {
cargoExtraArgs = "-p attic-client -p attic-server";
@@ -106,7 +133,7 @@ let
#
# We don't enable fat LTO in the default `attic` package since it
# dramatically increases build time.
attic-server = craneLib.buildPackage {
attic-server = craneLib.buildPackage ({
pname = "attic-server";
# We don't pull in the common cargoArtifacts because the feature flags
@@ -120,13 +147,13 @@ let
CARGO_PROFILE_RELEASE_LTO = "fat";
CARGO_PROFILE_RELEASE_CODEGEN_UNITS = "1";
};
} // extraArgs);
# Attic interacts with Nix directly and its tests require trusted-user access
# to nix-daemon to import NARs, which is not possible in the build sandbox.
# In the CI pipeline, we build the test executable inside the sandbox, then
# run it outside.
attic-tests = craneLib.mkCargoDerivation {
attic-tests = craneLib.mkCargoDerivation ({
pname = "attic-tests";
inherit src version buildInputs cargoArtifacts;
@@ -151,7 +178,7 @@ let
runHook postInstall
'';
};
} // extraArgs);
in {
inherit cargoArtifacts attic attic-client attic-server attic-tests;
}


@@ -7,11 +7,11 @@
]
},
"locked": {
"lastModified": 1717025063,
"narHash": "sha256-dIubLa56W9sNNz0e8jGxrX3CAkPXsq7snuFA/Ie6dn8=",
"lastModified": 1722960479,
"narHash": "sha256-NhCkJJQhD5GUib8zN9JrmYGMwt4lCRp6ZVNzIiYCl0Y=",
"owner": "ipetkov",
"repo": "crane",
"rev": "480dff0be03dac0e51a8dfc26e882b0d123a450e",
"rev": "4c6c77920b8d44cd6660c1621dea6b3fc4b4c4f4",
"type": "github"
},
"original": {
@@ -23,11 +23,11 @@
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1673956053,
"narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
"lastModified": 1696426674,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"type": "github"
},
"original": {
@@ -36,28 +36,33 @@
"type": "github"
}
},
"flake-utils": {
"flake-parts": {
"inputs": {
"nixpkgs-lib": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1667395993,
"narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
"lastModified": 1722555600,
"narHash": "sha256-XOQkdLafnb/p9ij77byFQjDf5m5QYl9b2REiVClC+x4=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "8471fe90ad337a8074e957b69ca4d0089218391d",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1711401922,
"narHash": "sha256-QoQqXoj8ClGo0sqD/qWKFWezgEwUL0SUh37/vY2jNhc=",
"lastModified": 1726042813,
"narHash": "sha256-LnNKCCxnwgF+575y0pxUdlGZBO/ru1CtGHIqQVfvjlA=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "07262b18b97000d16a4bdb003418bd2fb067a932",
"rev": "159be5db480d1df880a0135ca0bfed84c2f88353",
"type": "github"
},
"original": {
@@ -69,16 +74,16 @@
},
"nixpkgs-stable": {
"locked": {
"lastModified": 1711460390,
"narHash": "sha256-akSgjDZL6pVHEfSE6sz1DNSXuYX6hq+P/1Z5IoYWs7E=",
"lastModified": 1724316499,
"narHash": "sha256-Qb9MhKBUTCfWg/wqqaxt89Xfi6qTD3XpTzQ9eXi3JmE=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "44733514b72e732bd49f5511bd0203dea9b9a434",
"rev": "797f7dc49e0bc7fab4b57c021cdf68f595e47841",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-23.11",
"ref": "nixos-24.05",
"repo": "nixpkgs",
"type": "github"
}
@@ -87,7 +92,7 @@
"inputs": {
"crane": "crane",
"flake-compat": "flake-compat",
"flake-utils": "flake-utils",
"flake-parts": "flake-parts",
"nixpkgs": "nixpkgs",
"nixpkgs-stable": "nixpkgs-stable"
}

flake.nix

@@ -3,8 +3,12 @@
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
nixpkgs-stable.url = "github:NixOS/nixpkgs/nixos-23.11";
flake-utils.url = "github:numtide/flake-utils";
nixpkgs-stable.url = "github:NixOS/nixpkgs/nixos-24.05";
flake-parts = {
url = "github:hercules-ci/flake-parts";
inputs.nixpkgs-lib.follows = "nixpkgs";
};
crane = {
url = "github:ipetkov/crane";
@@ -17,175 +21,31 @@
};
};
outputs = { self, nixpkgs, nixpkgs-stable, flake-utils, crane, ... }: let
supportedSystems = flake-utils.lib.defaultSystems ++ [ "riscv64-linux" ];
outputs = inputs @ { self, flake-parts, ... }: let
supportedSystems = [
"x86_64-linux"
"aarch64-linux"
"riscv64-linux"
"aarch64-darwin"
"x86_64-darwin"
];
makeCranePkgs = pkgs: let
craneLib = crane.mkLib pkgs;
in pkgs.callPackage ./crane.nix { inherit craneLib; };
in flake-utils.lib.eachSystem supportedSystems (system: let
pkgs = import nixpkgs {
inherit system;
overlays = [];
};
cranePkgs = makeCranePkgs pkgs;
inherit (inputs.nixpkgs) lib;
pkgsStable = import nixpkgs-stable {
inherit system;
overlays = [];
};
cranePkgsStable = makeCranePkgs pkgsStable;
modules = builtins.foldl' (acc: f: f acc) ./flake [
builtins.readDir
(lib.filterAttrs (name: type:
type == "regular" && lib.hasSuffix ".nix" name
))
(lib.mapAttrsToList (name: _:
lib.path.append ./flake name
))
];
inherit (pkgs) lib;
in rec {
packages = {
default = packages.attic;
in flake-parts.lib.mkFlake { inherit inputs; } {
imports = modules;
systems = supportedSystems;
inherit (cranePkgs) attic attic-client attic-server;
attic-nixpkgs = pkgs.callPackage ./package.nix { };
attic-ci-installer = pkgs.callPackage ./ci-installer.nix {
inherit self;
};
book = pkgs.callPackage ./book {
attic = packages.attic;
};
} // (lib.optionalAttrs (system != "x86_64-darwin") {
# Unfortunately, x86_64-darwin fails to evaluate static builds
# TODO: Make this work with Crane
attic-static = (pkgs.pkgsStatic.callPackage ./package.nix {
nix = pkgs.pkgsStatic.nix.overrideAttrs (old: {
patches = (old.patches or []) ++ [
# To be submitted
(pkgs.fetchpatch {
url = "https://github.com/NixOS/nix/compare/3172c51baff5c81362fcdafa2e28773c2949c660...6b09a02536d5946458b537dfc36b7d268c9ce823.diff";
hash = "sha256-LFLq++J2XitEWQ0o57ihuuUlYk2PgUr11h7mMMAEe3c=";
})
];
});
}).overrideAttrs (old: {
nativeBuildInputs = (old.nativeBuildInputs or []) ++ [
pkgs.nukeReferences
];
# Read by pkg_config crate (do some autodetection in build.rs?)
PKG_CONFIG_ALL_STATIC = "1";
"NIX_CFLAGS_LINK_${pkgs.pkgsStatic.stdenv.cc.suffixSalt}" = "-lc";
RUSTFLAGS = "-C relocation-model=static";
postFixup = (old.postFixup or "") + ''
rm -f $out/nix-support/propagated-build-inputs
nuke-refs $out/bin/attic
'';
});
attic-client-static = packages.attic-static.override {
clientOnly = true;
};
}) // (lib.optionalAttrs pkgs.stdenv.isLinux {
attic-server-image = pkgs.dockerTools.buildImage {
name = "attic-server";
tag = "main";
copyToRoot = [
# Debugging utilities for `fly ssh console`
pkgs.busybox
packages.attic-server
# Now required by the fly.io sshd
pkgs.dockerTools.fakeNss
];
config = {
Entrypoint = [ "${packages.attic-server}/bin/atticd" ];
Env = [
"SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt"
];
};
};
});
devShells = {
default = pkgs.mkShell {
inputsFrom = with packages; [ attic book ];
nativeBuildInputs = with pkgs; [
rustc
rustfmt clippy
cargo-expand cargo-outdated cargo-edit
tokio-console
sqlite-interactive
editorconfig-checker
flyctl
wrk
] ++ (lib.optionals pkgs.stdenv.isLinux [
linuxPackages.perf
]);
NIX_PATH = "nixpkgs=${pkgs.path}";
RUST_SRC_PATH = "${pkgs.rustPlatform.rustcSrc}/library";
# See comment in `attic/build.rs`
NIX_INCLUDE_PATH = "${lib.getDev pkgs.nix}/include";
ATTIC_DISTRIBUTOR = "dev";
};
demo = pkgs.mkShell {
nativeBuildInputs = [
packages.default
];
shellHook = ''
>&2 echo
>&2 echo '🚀 Run `atticd` to get started!'
>&2 echo
'';
};
};
devShell = devShells.default;
internal = {
inherit (cranePkgs) attic-tests cargoArtifacts;
};
checks = let
makeIntegrationTests = pkgs: import ./integration-tests {
pkgs = import nixpkgs {
inherit system;
overlays = [ self.overlays.default ];
};
flake = self;
};
unstableTests = makeIntegrationTests pkgs;
stableTests = lib.mapAttrs' (name: lib.nameValuePair "stable-${name}") (makeIntegrationTests pkgsStable);
in lib.optionalAttrs pkgs.stdenv.isLinux (unstableTests // stableTests);
}) // {
overlays = {
default = final: prev: let
cranePkgs = makeCranePkgs final;
in {
inherit (cranePkgs) attic attic-client attic-server;
};
};
nixosModules = {
atticd = {
imports = [
./nixos/atticd.nix
];
services.atticd.useFlakeCompatOverlay = false;
nixpkgs.overlays = [
self.overlays.default
];
};
};
debug = true;
};
}

flake/devshells.nix Normal file

@@ -0,0 +1,118 @@
# Development shells
toplevel @ { lib, flake-parts-lib, ... }:
let
inherit (lib)
mkOption
types
;
inherit (flake-parts-lib)
mkPerSystemOption
;
in
{
options = {
perSystem = mkPerSystemOption {
options.attic.devshell = {
packageSets = mkOption {
type = types.attrsOf (types.listOf types.package);
default = {};
};
extraPackages = mkOption {
type = types.listOf types.package;
default = [];
};
extraArgs = mkOption {
type = types.attrsOf types.unspecified;
default = {};
};
};
};
};
config = {
perSystem = { self', pkgs, config, ... }: let
cfg = config.attic.devshell;
in {
attic.devshell.packageSets = with pkgs; {
rustc = lib.optionals (config.attic.toolchain == null) [
rustc
];
rust = [
cargo-expand
cargo-outdated
cargo-edit
tokio-console
];
linters = [
clippy
rustfmt
editorconfig-checker
];
utils = [
jq
just
];
ops = [
postgresql
sqlite-interactive
flyctl
skopeo
manifest-tool
] ++ lib.optionals pkgs.stdenv.isLinux [
wrangler
];
bench = [
wrk
] ++ lib.optionals pkgs.stdenv.isLinux [
linuxPackages.perf
];
wasm = [
llvmPackages_latest.bintools
worker-build wasm-pack wasm-bindgen-cli
];
};
devShells.default = pkgs.mkShell (lib.recursiveUpdate {
inputsFrom = [
self'.packages.attic
self'.packages.book
];
packages = lib.flatten (lib.attrValues cfg.packageSets);
env = {
ATTIC_DISTRIBUTOR = toplevel.config.attic.distributor;
RUST_SRC_PATH = "${pkgs.rustPlatform.rustcSrc}/library";
NIX_PATH = "nixpkgs=${pkgs.path}";
# See comment in `attic/build.rs`
NIX_INCLUDE_PATH = "${lib.getDev self'.packages.attic.passthru.nix}/include";
# Used by `just with-nix` to build/test with alternative Nix versions.
NIX_VERSIONS = config.attic.nix-versions.manifestFile;
};
} cfg.extraArgs);
devShells.demo = pkgs.mkShell {
packages = [ self'.packages.default ];
shellHook = ''
>&2 echo
>&2 echo '🚀 Run `atticd` to get started!'
>&2 echo
'';
};
};
};
}
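
The module above groups the shell's inputs into named packageSets and merges extraArgs into the mkShell call via lib.recursiveUpdate. A hypothetical per-system override using these options (the option paths come from the module above; the concrete values are only illustrative):

{
  perSystem = { pkgs, ... }: {
    attic.devshell = {
      # Extra tools for local debugging (illustrative)
      extraPackages = [ pkgs.gdb ];
      # Merged into the mkShell arguments; entries under `env` become
      # environment variables in the resulting shell
      extraArgs.env.RUST_LOG = "attic=debug";
    };
  };
}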

flake/distributor.nix Normal file

@@ -0,0 +1,15 @@
{ lib, flake-parts-lib, ... }:
let
inherit (lib)
mkOption
types
;
in
{
options = {
attic.distributor = mkOption {
type = types.str;
default = "dev";
};
};
}
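
This value surfaces in the dev shell as ATTIC_DISTRIBUTOR (see flake/devshells.nix above). A fork distributing its own builds would override it at the flake level, for example (the value is illustrative):

{
  attic.distributor = "my-org";
}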

View file

@@ -0,0 +1,60 @@
{ lib, flake-parts-lib, inputs, self, ... }:
let
inherit (lib)
mkOption
types
;
inherit (flake-parts-lib)
mkPerSystemOption
;
in
{
options = {
perSystem = mkPerSystemOption {
options.attic.integration-tests = {
nixpkgsArgs = mkOption {
type = types.attrsOf types.anything;
default = {};
};
tests = mkOption {
type = types.attrsOf types.package;
default = {};
};
stableTests = mkOption {
type = types.attrsOf types.package;
default = {};
};
};
};
};
config = {
perSystem = { self', pkgs, config, system, ... }: let
cfg = config.attic.integration-tests;
vmPkgs = import inputs.nixpkgs ({
inherit system;
overlays = [ self.overlays.default ];
} // cfg.nixpkgsArgs);
vmPkgsStable = import inputs.nixpkgs-stable ({
inherit system;
overlays = [ self.overlays.default ];
} // cfg.nixpkgsArgs);
makeIntegrationTests = pkgs: import ../integration-tests {
inherit pkgs;
flake = self;
};
in {
attic.integration-tests = {
tests = makeIntegrationTests vmPkgs;
stableTests = makeIntegrationTests vmPkgsStable;
};
checks = let
tests = cfg.tests;
stableTests = lib.mapAttrs' (name: lib.nameValuePair "stable-${name}") cfg.stableTests;
in lib.optionalAttrs pkgs.stdenv.isLinux (tests // stableTests);
};
};
}
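
Since nixpkgsArgs is spliced into both import inputs.nixpkgs calls, the package sets used by the VM tests can be customized without touching this module. A sketch of a per-system override (the attribute path is from the module above; the argument passed through is an assumption):

{
  perSystem = { ... }: {
    attic.integration-tests.nixpkgsArgs = {
      # Forwarded to `import nixpkgs { ... }` for the test VMs
      config.allowUnfree = true;
    };
  };
}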

flake/nix-versions.nix Normal file

@@ -0,0 +1,64 @@
{ lib, flake-parts-lib, config, ... }:
let
inherit (lib)
mkOption
types
;
inherit (flake-parts-lib)
mkPerSystemOption
;
in
{
options = {
perSystem = mkPerSystemOption {
options.attic.nix-versions = {
versions = mkOption {
type = types.attrsOf types.package;
default = {};
};
manifestFile = mkOption {
type = types.package;
};
};
options.internalMatrix = mkOption {
type = types.attrsOf (types.attrsOf types.package);
};
};
};
config = {
flake.internalMatrix = lib.mapAttrs (system: ps: ps.internalMatrix) config.allSystems;
perSystem = { self', pkgs, config, cranePkgs, ... }: let
cfg = config.attic.nix-versions;
in {
attic.nix-versions = {
versions = {
default = pkgs.nix;
"2.20" = pkgs.nixVersions.nix_2_20;
"2.24" = pkgs.nixVersions.nix_2_24;
};
manifestFile = let
manifest = lib.mapAttrs (_: nix: {
inherit nix;
shellHook = ''
export NIX_INCLUDE_PATH="${lib.getDev nix}/include"
export NIX_CFLAGS_COMPILE="-isystem $NIX_INCLUDE_PATH $NIX_CFLAGS_COMPILE"
export NIX_LDFLAGS="-L${nix}/lib $NIX_LDFLAGS"
export PKG_CONFIG_PATH="${lib.getDev nix}/lib/pkgconfig:$PKG_CONFIG_PATH"
export PATH="${lib.getBin nix}/bin:$PATH"
'';
}) cfg.versions;
in pkgs.writeText "nix-versions.json" (builtins.toJSON manifest);
};
internalMatrix = lib.mapAttrs (_: nix: let
cranePkgs' = cranePkgs.override { inherit nix; };
in {
inherit (cranePkgs') attic-tests cargoArtifacts;
}) cfg.versions;
};
};
}
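
Each entry in versions becomes a { nix, shellHook } pair in the generated nix-versions.json, and internalMatrix gains a matching set of crane artifacts built against that Nix. Registering another version is roughly a one-attribute change (a sketch; whether nixVersions.nix_2_18 exists depends on the nixpkgs pin):

{
  perSystem = { pkgs, ... }: {
    # Hypothetical: add one more Nix to the build/test matrix
    attic.nix-versions.versions."2.18" = pkgs.nixVersions.nix_2_18;
  };
}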

flake/nixos.nix Normal file

@@ -0,0 +1,16 @@
{ config, ... }:
{
flake.nixosModules = {
atticd = {
imports = [
../nixos/atticd.nix
];
services.atticd.useFlakeCompatOverlay = false;
nixpkgs.overlays = [
config.flake.overlays.default
];
};
};
}

flake/overlays.nix Normal file

@@ -0,0 +1,14 @@
{ makeCranePkgs, ... }:
{
flake.overlays = {
default = final: prev: let
cranePkgs = makeCranePkgs final;
in {
inherit (cranePkgs)
attic
attic-client
attic-server
;
};
};
}
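
The overlay only injects the three crane-built packages, so it composes cleanly with other overlays. A minimal standalone use, assuming the flake is in scope as attic (consumer code, not part of this repository):

let
  # Hypothetical consumer: apply the overlay to a plain nixpkgs import
  pkgs = import <nixpkgs> { overlays = [ attic.overlays.default ]; };
in
  pkgs.attic-client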

flake/packages.nix Normal file

@@ -0,0 +1,161 @@
{ self
, lib
, flake-parts-lib
, inputs
, config
, makeCranePkgs
, getSystem
, ...
}:
let
inherit (lib)
mkOption
types
;
inherit (flake-parts-lib)
mkPerSystemOption
;
# Re-evaluate perSystem with cross nixpkgs
# HACK before https://github.com/hercules-ci/flake-parts/issues/95 is solved
evalCross = { system, pkgs }: config.allSystems.${system}.debug.extendModules {
modules = [
({ config, lib, ... }: {
_module.args.pkgs = pkgs;
_module.args.self' = lib.mkForce config;
})
];
};
in
{
options = {
perSystem = mkPerSystemOption {
options.attic = {
toolchain = mkOption {
type = types.nullOr types.package;
default = null;
};
extraPackageArgs = mkOption {
type = types.attrsOf types.anything;
default = {};
};
};
};
};
config = {
_module.args.makeCranePkgs = lib.mkDefault (pkgs: let
perSystemConfig = getSystem pkgs.system;
craneLib = builtins.foldl' (acc: f: f acc) pkgs [
inputs.crane.mkLib
(craneLib:
if perSystemConfig.attic.toolchain == null then craneLib
else craneLib.overrideToolchain perSystemConfig.attic.toolchain
)
];
in pkgs.callPackage ../crane.nix {
inherit craneLib;
inherit (perSystemConfig.attic) extraPackageArgs;
});
perSystem = { self', pkgs, config, cranePkgs, ... }: (lib.mkMerge [
{
_module.args.cranePkgs = makeCranePkgs pkgs;
packages = {
default = self'.packages.attic;
inherit (cranePkgs)
attic
attic-client
attic-server
;
attic-nixpkgs = pkgs.callPackage ../package.nix { };
attic-ci-installer = pkgs.callPackage ../ci-installer.nix {
inherit self;
};
book = pkgs.callPackage ../book {
attic = self'.packages.attic;
};
};
}
(lib.mkIf pkgs.stdenv.isLinux {
packages = {
attic-server-image = pkgs.dockerTools.buildImage {
name = "attic-server";
tag = "main";
copyToRoot = [
self'.packages.attic-server
# Debugging utilities for `fly ssh console`
pkgs.busybox
# Now required by the fly.io sshd
pkgs.dockerTools.fakeNss
];
config = {
Entrypoint = [ "${self'.packages.attic-server}/bin/atticd" ];
Env = [
"SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt"
];
};
};
};
})
(lib.mkIf (pkgs.system == "x86_64-linux") {
packages = {
attic-server-image-aarch64 = let
eval = evalCross {
system = "aarch64-linux";
pkgs = pkgs.pkgsCross.aarch64-multiplatform;
};
in eval.config.packages.attic-server-image;
};
})
# Unfortunately, x86_64-darwin fails to evaluate static builds
(lib.mkIf (pkgs.system != "x86_64-darwin") {
packages = {
# TODO: Make this work with Crane
attic-static = (pkgs.pkgsStatic.callPackage ../package.nix {
nix = pkgs.pkgsStatic.nix.overrideAttrs (old: {
patches = (old.patches or []) ++ [
# To be submitted
(pkgs.fetchpatch {
url = "https://github.com/NixOS/nix/compare/3172c51baff5c81362fcdafa2e28773c2949c660...6b09a02536d5946458b537dfc36b7d268c9ce823.diff";
hash = "sha256-LFLq++J2XitEWQ0o57ihuuUlYk2PgUr11h7mMMAEe3c=";
})
];
});
}).overrideAttrs (old: {
nativeBuildInputs = (old.nativeBuildInputs or []) ++ [
pkgs.nukeReferences
];
# Read by pkg_config crate (do some autodetection in build.rs?)
PKG_CONFIG_ALL_STATIC = "1";
"NIX_CFLAGS_LINK_${pkgs.pkgsStatic.stdenv.cc.suffixSalt}" = "-lc";
RUSTFLAGS = "-C relocation-model=static";
postFixup = (old.postFixup or "") + ''
rm -f $out/nix-support/propagated-build-inputs
nuke-refs $out/bin/attic
'';
});
attic-client-static = self'.packages.attic-static.override {
clientOnly = true;
};
};
})
]);
};
}
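
Note the role of evalCross here: because it re-evaluates the entire perSystem module set with pkgs forced to pkgs.pkgsCross.aarch64-multiplatform, attic-server-image-aarch64 can be cross-built from an x86_64-linux machine with an ordinary nix build .#attic-server-image-aarch64 (command illustrative), which is what lets a single CI host publish both image architectures.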

View file

@@ -188,8 +188,8 @@ in {
server.wait_for_unit('atticd.service')
client.wait_until_succeeds("curl -sL http://server:8080", timeout=40)
-root_token = server.succeed("${cmd.atticadm} make-token --sub 'e2e-root' --validity '1 month' --push '*' --pull '*' --delete '*' --create-cache '*' --destroy-cache '*' --configure-cache '*' --configure-cache-retention '*'").strip()
-readonly_token = server.succeed("${cmd.atticadm} make-token --sub 'e2e-root' --validity '1 month' --pull 'test'").strip()
+root_token = server.succeed("${cmd.atticadm} make-token --sub 'e2e-root' --validity '1 month' --push '*' --pull '*' --delete '*' --create-cache '*' --destroy-cache '*' --configure-cache '*' --configure-cache-retention '*' </dev/null").strip()
+readonly_token = server.succeed("${cmd.atticadm} make-token --sub 'e2e-root' --validity '1 month' --pull 'test' </dev/null").strip()
client.succeed(f"attic login --set-default root http://server:8080 {root_token}")
client.succeed(f"attic login readonly http://server:8080 {readonly_token}")

View file

@@ -33,6 +33,6 @@ let
}
];
};
-}) (lib.cartesianProductOfSets matrix));
+}) (lib.cartesianProduct matrix));
in {
} // basicTests

justfile Normal file

@@ -0,0 +1,51 @@
set positional-arguments

here := env_var_or_default("JUST_INVOCATION_DIR", invocation_directory())
base := `pwd`

#@echo "here: {{ here }}"
#@echo "base: {{ base }}"

# List available targets
list:
    @just --list --unsorted

# Run a command with an alternative Nix version
with-nix version *command:
    set -e; \
    hook="$(jq -e -r '.[$version].shellHook' --arg version "{{ version }}" < "$NIX_VERSIONS" || (>&2 echo "Version {{ version }} doesn't exist"; exit 1))"; \
    eval "$hook"; \
    CARGO_TARGET_DIR="{{ base }}/target/nix-{{ version }}" \
    {{ command }}

# (CI) Build WebAssembly crates
ci-build-wasm:
    #!/usr/bin/env bash
    set -euxo pipefail
    # https://github.com/rust-lang/rust/issues/122357
    export RUST_MIN_STACK=16777216
    pushd attic
    cargo build --target wasm32-unknown-unknown --no-default-features -F chunking -F stream
    popd
    pushd token
    cargo build --target wasm32-unknown-unknown
    popd

# (CI) Run unit tests
ci-unit-tests matrix:
    #!/usr/bin/env bash
    set -euxo pipefail
    system=$(nix-instantiate --eval -E 'builtins.currentSystem')
    tests=$(nix build .#internalMatrix."$system".\"{{ matrix }}\".attic-tests --no-link --print-out-paths -L)
    find "$tests/bin" -exec {} \;

# (CI) Run rustfmt check
ci-rustfmt:
    cargo fmt --check

# (CI) Build and push images
ci-build-and-push-images *args:
    .ci/build-and-push-images.sh {{ args }}
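
Taken together with flake/nix-versions.nix above, a run such as just with-nix 2.24 cargo test (illustrative) looks up the 2.24 entry in the NIX_VERSIONS manifest, evals its shellHook to point the toolchain at that Nix, and keeps per-version build artifacts separated under target/nix-2.24.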

View file

@@ -36,6 +36,7 @@ let
atticadmWrapper = pkgs.writeShellScriptBin "atticd-atticadm" ''
exec systemd-run \
--quiet \
+--pipe \
--pty \
--same-dir \
--wait \

View file

@@ -19,7 +19,7 @@ path = "src/adm/main.rs"
doc = false
[dependencies]
-attic = { path = "../attic", default-features = false, features = [ "tokio" ] }
+attic = { path = "../attic", default-features = false, features = ["chunking", "stream", "tokio"] }
attic-token = { path = "../token" }
anyhow = "1.0.71"
@@ -37,7 +37,6 @@ derivative = "2.2.0"
digest = "0.10.7"
displaydoc = "0.2.4"
enum-as-inner = "0.6.0"
-fastcdc = "3.0.3"
futures = "0.3.28"
hex = "0.4.3"
http-body-util = "0.1.1"
@@ -96,6 +95,3 @@ features = [
"rt-multi-thread",
"sync",
]
-[dev-dependencies]
-tokio-test = "0.4.2"

View file

@@ -18,7 +18,7 @@ use axum::{
Router,
};
use futures::stream::BoxStream;
-use http_body_util::BodyExt;
+use futures::TryStreamExt as _;
use serde::Serialize;
use tokio_util::io::ReaderStream;
use tracing::instrument;
@@ -217,11 +217,11 @@ async fn get_nar(
match storage.download_file_db(remote_file, false).await? {
Download::Url(url) => Ok(Redirect::temporary(&url).into_response()),
Download::AsyncRead(stream) => {
-let stream = ReaderStream::new(stream);
-let body = Body::from_stream(stream).map_err(|e| {
+let stream = ReaderStream::new(stream).map_err(|e| {
tracing::error!("Stream error: {e}");
e
-}).into_inner();
+});
+let body = Body::from_stream(stream);
Ok(body.into_response())
}
@@ -254,11 +254,11 @@ async fn get_nar(
// TODO: Make num_prefetch configurable
// The ideal size depends on the average chunk size
-let merged = merge_chunks(chunks, streamer, storage, 2);
-let body = Body::from_stream(merged).map_err(|e| {
+let merged = merge_chunks(chunks, streamer, storage, 2).map_err(|e| {
tracing::error!("Stream error: {e}");
e
-}).into_inner();
+});
+let body = Body::from_stream(merged);
Ok(body.into_response())
}

View file

@@ -37,16 +37,16 @@ use attic::api::v1::upload_path::{
UploadPathNarInfo, UploadPathResult, UploadPathResultKind, ATTIC_NAR_INFO,
ATTIC_NAR_INFO_PREAMBLE_SIZE,
};
-use attic::chunking::chunk_stream;
use attic::hash::Hash;
use attic::stream::{read_chunk_async, StreamHasher};
use attic::util::Finally;
+use crate::chunking::chunk_stream;
use crate::database::entity::cache;
use crate::database::entity::chunk::{self, ChunkState, Entity as Chunk};
use crate::database::entity::chunkref::{self, Entity as ChunkRef};
use crate::database::entity::nar::{self, Entity as Nar, NarState};
-use crate::database::entity::object::{self, Entity as Object};
+use crate::database::entity::object::{self, Entity as Object, InsertExt};
use crate::database::entity::Json as DbJson;
use crate::database::{AtticDatabase, ChunkGuard, NarGuard};
@@ -257,12 +257,6 @@ async fn upload_path_dedup(
.map_err(ServerError::database_error)?;
// Create a mapping granting the local cache access to the NAR
-Object::delete_many()
-.filter(object::Column::CacheId.eq(cache.id))
-.filter(object::Column::StorePathHash.eq(upload_info.store_path_hash.to_string()))
-.exec(&txn)
-.await
-.map_err(ServerError::database_error)?;
Object::insert({
let mut new_object = upload_info.to_active_model();
new_object.cache_id = Set(cache.id);
@@ -271,6 +265,7 @@
new_object.created_by = Set(username);
new_object
})
+.on_conflict_do_update()
.exec(&txn)
.await
.map_err(ServerError::database_error)?;
@@ -487,12 +482,6 @@ async fn upload_path_new_chunked(
.map_err(ServerError::database_error)?;
// Create a mapping granting the local cache access to the NAR
-Object::delete_many()
-.filter(object::Column::CacheId.eq(cache.id))
-.filter(object::Column::StorePathHash.eq(upload_info.store_path_hash.to_string()))
-.exec(&txn)
-.await
-.map_err(ServerError::database_error)?;
Object::insert({
let mut new_object = upload_info.to_active_model();
new_object.cache_id = Set(cache.id);
@@ -501,6 +490,7 @@
new_object.created_by = Set(username);
new_object
})
+.on_conflict_do_update()
.exec(&txn)
.await
.map_err(ServerError::database_error)?;
@@ -594,12 +584,6 @@ async fn upload_path_new_unchunked(
.map_err(ServerError::database_error)?;
// Create a mapping granting the local cache access to the NAR
-Object::delete_many()
-.filter(object::Column::CacheId.eq(cache.id))
-.filter(object::Column::StorePathHash.eq(upload_info.store_path_hash.to_string()))
-.exec(&txn)
-.await
-.map_err(ServerError::database_error)?;
Object::insert({
let mut new_object = upload_info.to_active_model();
new_object.cache_id = Set(cache.id);
@@ -608,6 +592,7 @@
new_object.created_by = Set(username);
new_object
})
+.on_conflict_do_update()
.exec(&txn)
.await
.map_err(ServerError::database_error)?;

View file

@@ -6,6 +6,8 @@ use std::path::PathBuf;
use std::str::FromStr;
use sea_orm::entity::prelude::*;
+use sea_orm::sea_query::OnConflict;
+use sea_orm::Insert;
use super::nar::NarModel;
use super::Json;
@@ -15,6 +17,10 @@ use attic::hash::Hash;
pub type ObjectModel = Model;
+pub trait InsertExt {
+fn on_conflict_do_update(self) -> Self;
+}
/// An object in a binary cache.
#[derive(Debug, Clone, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "object")]
@@ -87,6 +93,27 @@ pub enum Relation {
Nar,
}
+impl InsertExt for Insert<ActiveModel> {
+fn on_conflict_do_update(self) -> Self {
+self.on_conflict(
+OnConflict::columns([Column::CacheId, Column::StorePathHash])
+.update_columns([
+Column::NarId,
+Column::StorePath,
+Column::References,
+Column::System,
+Column::Deriver,
+Column::Sigs,
+Column::Ca,
+Column::CreatedAt,
+Column::LastAccessedAt,
+Column::CreatedBy,
+])
+.to_owned(),
+)
+}
+}
impl Model {
/// Converts this object to a NarInfo.
pub fn to_nar_info(&self, nar: &NarModel) -> ServerResult<NarInfo> {

View file

@@ -159,12 +159,12 @@ async fn run_reap_orphan_chunks(state: &State) -> Result<()> {
let storage = state.storage().await?;
let orphan_chunk_limit = match db.get_database_backend() {
-// Arbitrarily chosen sensible value since there's no good default to choose from for MySQL
-sea_orm::DatabaseBackend::MySql => 1000,
+// Arbitrarily chosen sensible value since there's no good default to choose from for MySQL
+sea_orm::DatabaseBackend::MySql => 1000,
// Panic limit set by sqlx for postgresql: https://github.com/launchbadge/sqlx/issues/671#issuecomment-687043510
sea_orm::DatabaseBackend::Postgres => u64::from(u16::MAX),
-// Default statement limit imposed by sqlite: https://www.sqlite.org/limits.html#max_variable_number
-sea_orm::DatabaseBackend::Sqlite => 500,
+// Default statement limit imposed by sqlite: https://www.sqlite.org/limits.html#max_variable_number
+sea_orm::DatabaseBackend::Sqlite => 500,
};
// find all orphan chunks...

View file

@@ -15,7 +15,6 @@
pub mod access;
mod api;
-mod chunking;
pub mod config;
pub mod database;
pub mod error;

View file

@@ -79,8 +79,8 @@ async fn upgrade_0_to_1(storage_path: &Path) -> ServerResult<()> {
let name = file.file_name();
let name_bytes = name.as_os_str().as_bytes();
let parents = storage_path
-.join(OsStr::from_bytes(&name_bytes[0..1]))
-.join(OsStr::from_bytes(&name_bytes[0..2]));
+.join(OsStr::from_bytes(&name_bytes[0..1]))
+.join(OsStr::from_bytes(&name_bytes[0..2]));
let new_path = parents.join(name);
fs::create_dir_all(&parents).await.map_err(|e| {
ErrorKind::StorageError(anyhow::anyhow!("Failed to create directory {}", e))